kernel_to_il.cc
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/frontend/kernel_to_il.h"

#include <utility>

#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/ffi/abi.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/native_entry.h"
#include "vm/object_store.h"
#include "vm/report.h"
#include "vm/resolver.h"
#include "vm/runtime_entry.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"

namespace dart {

DEFINE_FLAG(bool,
            print_huge_methods,
            false,
            "Print huge methods (less optimized)");

DEFINE_FLAG(int,
            force_switch_dispatch_type,
            -1,
            "Force switch statements to use a particular dispatch type: "
            "-1=auto, 0=linear scan, 1=binary search, 2=jump table");

namespace kernel {

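// Shorthand macros used throughout this file: Z is the compiler zone, H the
// kernel translation helper, T the type translator, and I/IG the current
// isolate and isolate group.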
#define Z (zone_)
#define H (translation_helper_)
#define T (type_translator_)
#define I Isolate::Current()
#define IG IsolateGroup::Current()

FlowGraphBuilder::FlowGraphBuilder(
    ParsedFunction* parsed_function,
    ZoneGrowableArray<const ICData*>* ic_data_array,
    ZoneGrowableArray<intptr_t>* context_level_array,
    InlineExitCollector* exit_collector,
    bool optimizing,
    intptr_t osr_id,
    intptr_t first_block_id,
    bool inlining_unchecked_entry)
    : BaseFlowGraphBuilder(parsed_function,
                           first_block_id - 1,
                           osr_id,
                           context_level_array,
                           exit_collector,
                           inlining_unchecked_entry),
      translation_helper_(Thread::Current()),
      thread_(translation_helper_.thread()),
      zone_(translation_helper_.zone()),
      parsed_function_(parsed_function),
      optimizing_(optimizing),
      ic_data_array_(*ic_data_array),
      next_function_id_(0),
      loop_depth_(0),
      try_depth_(0),
      catch_depth_(0),
      block_expression_depth_(0),
      graph_entry_(nullptr),
      scopes_(nullptr),
      breakable_block_(nullptr),
      switch_block_(nullptr),
      try_catch_block_(nullptr),
      try_finally_block_(nullptr),
      catch_block_(nullptr),
      prepend_type_arguments_(Function::ZoneHandle(zone_)) {
  const auto& info = KernelProgramInfo::Handle(
      Z, parsed_function->function().KernelProgramInfo());
  H.InitFromKernelProgramInfo(info);
}

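// Enters the local scope declared at [kernel_offset]. If the scope has
// context-allocated (captured) variables, a fresh Context is pushed and made
// current; the matching ExitScope below pops it again.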
Fragment FlowGraphBuilder::EnterScope(
    intptr_t kernel_offset,
    const LocalScope** context_scope /* = nullptr */) {
  Fragment instructions;
  const LocalScope* scope = scopes_->scopes.Lookup(kernel_offset);
  if (scope->num_context_variables() > 0) {
    instructions += PushContext(scope);
    instructions += Drop();
  }
  if (context_scope != nullptr) {
    *context_scope = scope;
  }
  return instructions;
}

Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
  Fragment instructions;
  const intptr_t context_size =
      scopes_->scopes.Lookup(kernel_offset)->num_context_variables();
  if (context_size > 0) {
    instructions += PopContext();
  }
  return instructions;
}

Fragment FlowGraphBuilder::AdjustContextTo(int depth) {
  ASSERT(depth <= context_depth_ && depth >= 0);
  Fragment instructions;
  if (depth < context_depth_) {
    instructions += LoadContextAt(depth);
    instructions += StoreLocal(TokenPosition::kNoSource,
                               parsed_function_->current_context_var());
    instructions += Drop();
    context_depth_ = depth;
  }
  return instructions;
}

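// Allocates a new Context for [scope], links it to the current context
// through Context::parent, and makes it the current context (incrementing
// context_depth_). PopContext simply re-installs the parent context.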
Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
  ASSERT(scope->num_context_variables() > 0);
  Fragment instructions = AllocateContext(scope->context_slots());
  LocalVariable* context = MakeTemporary();
  instructions += LoadLocal(context);
  instructions += LoadLocal(parsed_function_->current_context_var());
  instructions += StoreNativeField(Slot::Context_parent(),
                                   StoreFieldInstr::Kind::kInitializing);
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  ++context_depth_;
  return instructions;
}

Fragment FlowGraphBuilder::PopContext() {
  return AdjustContextTo(context_depth_ - 1);
}

Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
  // TODO(27590): We could use `active_class_->IsGeneric()`.
  Fragment instructions;
  if (scopes_ != nullptr && scopes_->type_arguments_variable != nullptr) {
#ifdef DEBUG
    Function& function =
        Function::Handle(Z, parsed_function_->function().ptr());
    while (function.IsClosureFunction()) {
      function = function.parent_function();
    }
    ASSERT(function.IsFactory());
#endif
    instructions += LoadLocal(scopes_->type_arguments_variable);
  } else if (parsed_function_->has_receiver_var() &&
             active_class_.ClassNumTypeArguments() > 0) {
    ASSERT(!parsed_function_->function().IsFactory());
    instructions += LoadLocal(parsed_function_->receiver_var());
    instructions += LoadNativeField(
        Slot::GetTypeArgumentsSlotFor(thread_, *active_class_.klass));
  } else {
    instructions += NullConstant();
  }
  return instructions;
}

// This function is responsible for pushing a type arguments vector which
// contains all type arguments of enclosing functions prepended to the type
// arguments of the current function.
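//
// For example (illustrative Dart, not code from this file):
//
//   void outer<T>() {
//     void inner<S>() { /* ... */ }  // sees the vector [T, S]
//     inner<int>();
//   }
//
// Inside `inner`, the function type arguments vector is [T, S]: the
// enclosing function's type arguments prepended to `inner`'s own.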
Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
  Fragment instructions;

  const Function& function = parsed_function_->function();

  if (function.IsGeneric() || function.HasGenericParent()) {
    ASSERT(parsed_function_->function_type_arguments() != nullptr);
    instructions += LoadLocal(parsed_function_->function_type_arguments());
  } else {
    instructions += NullConstant();
  }

  return instructions;
}

Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
    const TypeArguments& type_arguments) {
  Fragment instructions;

  auto const mode = type_arguments.GetInstantiationMode(
      Z, &parsed_function_->function(), active_class_.klass);

  switch (mode) {
    case InstantiationMode::kIsInstantiated:
      // There are no type references to type parameters so we can just take it.
      instructions += Constant(type_arguments);
      break;
    case InstantiationMode::kSharesInstantiatorTypeArguments:
      // If the instantiator type arguments are just passed on, we don't need
      // to resolve the type parameters.
      //
      // This is for example the case here:
      //     class Foo<T> {
      //       newList() => new List<T>();
      //     }
      // We just use the type argument vector from the [Foo] object and pass it
      // directly to the `new List<T>()` factory constructor.
      instructions += LoadInstantiatorTypeArguments();
      break;
    case InstantiationMode::kSharesFunctionTypeArguments:
      instructions += LoadFunctionTypeArguments();
      break;
    case InstantiationMode::kNeedsInstantiation:
      // Otherwise we need to resolve [TypeParameterType]s in the type
      // expression based on the current instantiator type argument vector.
      if (!type_arguments.IsInstantiated(kCurrentClass)) {
        instructions += LoadInstantiatorTypeArguments();
      } else {
        instructions += NullConstant();
      }
      if (!type_arguments.IsInstantiated(kFunctions)) {
        instructions += LoadFunctionTypeArguments();
      } else {
        instructions += NullConstant();
      }
      instructions += InstantiateTypeArguments(type_arguments);
      break;
  }
  return instructions;
}

Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
                                           intptr_t handler_index,
                                           bool needs_stacktrace,
                                           bool is_synthesized) {
  LocalVariable* exception_var = CurrentException();
  LocalVariable* stacktrace_var = CurrentStackTrace();
  LocalVariable* raw_exception_var = CurrentRawException();
  LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();

  CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
      is_synthesized,  // whether catch block was synthesized by FE compiler
      AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
      handler_index, needs_stacktrace, GetNextDeoptId(), exception_var,
      stacktrace_var, raw_exception_var, raw_stacktrace_var);
  graph_entry_->AddCatchEntry(entry);

  Fragment instructions(entry);

  // Auxiliary variables introduced by the try catch can be captured if we are
  // inside a function with yield/resume points. In this case we first need
  // to restore the context to match the context at entry into the closure.
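  //
  // Illustrative Dart example (not code from this file):
  //
  //   Future<void> f() async {
  //     try {
  //       await g();
  //     } catch (e) {
  //       print(e);
  //     }
  //   }
  //
  // Here the try/catch spans a suspension point, so the auxiliary try/catch
  // variables live in the closure context rather than on the stack.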
  const bool should_restore_closure_context =
      CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
  LocalVariable* context_variable = parsed_function_->current_context_var();
  if (should_restore_closure_context) {
    ASSERT(parsed_function_->function().IsClosureFunction());

    LocalVariable* closure_parameter = parsed_function_->ParameterVariable(0);
    ASSERT(!closure_parameter->is_captured());
    instructions += LoadLocal(closure_parameter);
    instructions += LoadNativeField(Slot::Closure_context());
    instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
    instructions += Drop();
  }

  if (exception_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_exception_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *exception_var));
  }
  if (stacktrace_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_stacktrace_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *stacktrace_var));
  }

  // :saved_try_context_var can be captured in the context of the closure.
  // In this case CatchBlockEntryInstr restores :current_context_var to point
  // to the closure context in the same way as the normal function prologue
  // does. Update the current context depth to reflect that.
  const intptr_t saved_context_depth = context_depth_;
  ASSERT(!CurrentCatchContext()->is_captured() ||
         CurrentCatchContext()->owner()->context_level() == 0);
  context_depth_ = 0;
  instructions += LoadLocal(CurrentCatchContext());
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  instructions += Drop();
  context_depth_ = saved_context_depth;

  return instructions;
}

Fragment FlowGraphBuilder::TryCatch(int try_handler_index) {
  // The body of the try needs to have its own block in order to get a new try
  // index.
  //
  // => We therefore create a block for the body (fresh try index) and another
  //    join block (with current try index).
  Fragment body;
  JoinEntryInstr* entry = BuildJoinEntry(try_handler_index);
  body += LoadLocal(parsed_function_->current_context_var());
  body += StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
  body += Drop();
  body += Goto(entry);
  return Fragment(body.entry, entry);
}

Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
    TokenPosition position) {
  ASSERT(loop_depth_ == 0);
  return CheckStackOverflow(position, loop_depth_, 0);
}

Fragment FlowGraphBuilder::CloneContext(
    const ZoneGrowableArray<const Slot*>& context_slots) {
  LocalVariable* context_variable = parsed_function_->current_context_var();

  Fragment instructions = LoadLocal(context_variable);

  CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
      InstructionSource(), Pop(), context_slots, GetNextDeoptId());
  instructions <<= clone_instruction;
  Push(clone_instruction);

  instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
  instructions += Drop();
  return instructions;
}

Fragment FlowGraphBuilder::InstanceCall(
    TokenPosition position,
    const String& name,
    Token::Kind kind,
    intptr_t type_args_len,
    intptr_t argument_count,
    const Array& argument_names,
    intptr_t checked_argument_count,
    const Function& interface_target,
    const Function& tearoff_interface_target,
    const InferredTypeMetadata* result_type,
    bool use_unchecked_entry,
    const CallSiteAttributesMetadata* call_site_attrs,
    bool receiver_is_not_smi,
    bool is_call_on_this) {
  Fragment instructions = RecordCoverage(position);
  const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  InstanceCallInstr* call = new (Z) InstanceCallInstr(
      InstructionSource(position), name, kind, std::move(arguments),
      type_args_len, argument_names, checked_argument_count, ic_data_array_,
      GetNextDeoptId(), interface_target, tearoff_interface_target);
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  if (is_call_on_this) {
    call->mark_as_call_on_this();
  }
  if (call_site_attrs != nullptr && call_site_attrs->receiver_type != nullptr &&
      call_site_attrs->receiver_type->IsInstantiated()) {
    call->set_receivers_static_type(call_site_attrs->receiver_type);
  } else if (!interface_target.IsNull()) {
    const Class& owner = Class::Handle(Z, interface_target.Owner());
    const AbstractType& type =
        AbstractType::ZoneHandle(Z, owner.DeclarationType());
    call->set_receivers_static_type(&type);
  }
  call->set_receiver_is_not_smi(receiver_is_not_smi);
  Push(call);
  instructions <<= call;
  if (result_type != nullptr && result_type->IsConstant()) {
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
  }
  return instructions;
}

Fragment FlowGraphBuilder::FfiCall(
    const compiler::ffi::CallMarshaller& marshaller,
    bool is_leaf) {
  Fragment body;

  const intptr_t num_arguments =
      FfiCallInstr::InputCountForMarshaller(marshaller);
  InputsArray arguments = GetArguments(num_arguments);
  FfiCallInstr* const call = new (Z)
      FfiCallInstr(GetNextDeoptId(), marshaller, is_leaf, std::move(arguments));
  Push(call);
  body <<= call;

  return body;
}

Fragment FlowGraphBuilder::CallLeafRuntimeEntry(
    const RuntimeEntry& entry,
    Representation return_representation,
    const ZoneGrowableArray<Representation>& argument_representations) {
  Fragment body;

  body += LoadThread();
  body += LoadUntagged(compiler::target::Thread::OffsetFromThread(&entry));

  const intptr_t num_arguments = argument_representations.length() + 1;
  InputsArray arguments = GetArguments(num_arguments);
  auto* const call = LeafRuntimeCallInstr::Make(
      Z, return_representation, argument_representations, std::move(arguments));
  Push(call);
  body <<= call;

  return body;
}

Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
                                            int catch_try_index) {
  Fragment instructions;
  Value* stacktrace = Pop();
  Value* exception = Pop();
  instructions += Fragment(new (Z) ReThrowInstr(
                               InstructionSource(position), catch_try_index,
                               GetNextDeoptId(), exception, stacktrace))
                      .closed();
  // Use its side effect of leaving a constant on the stack (does not change
  // the graph).
  NullConstant();

  return instructions;
}

Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
  // Captured 'this' is immutable, so within the outer method we don't need to
  // load it from the context.
  const ParsedFunction* pf = parsed_function_;
  if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
      variable == pf->receiver_var()) {
    ASSERT(variable == pf->ParameterVariable(0));
    variable = pf->RawParameterVariable(0);
  }
  if (variable->is_captured()) {
    Fragment instructions;
    instructions += LoadContextAt(variable->owner()->context_level());
    instructions +=
        LoadNativeField(Slot::GetContextVariableSlotFor(thread_, *variable));
    return instructions;
  } else {
    return BaseFlowGraphBuilder::LoadLocal(variable);
  }
}

IndirectGotoInstr* FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
  Value* index = Pop();
  return new (Z) IndirectGotoInstr(target_count, index);
}

Fragment FlowGraphBuilder::ThrowLateInitializationError(
    TokenPosition position,
    const char* throw_method_name,
    const String& name) {
  const auto& dart_internal = Library::Handle(Z, Library::InternalLibrary());
  const Class& klass =
      Class::ZoneHandle(Z, dart_internal.LookupClass(Symbols::LateError()));
  ASSERT(!klass.IsNull());

  const auto& error = klass.EnsureIsFinalized(thread_);
  ASSERT(error == Error::null());
  const Function& throw_new =
      Function::ZoneHandle(Z, klass.LookupStaticFunctionAllowPrivate(
                                  H.DartSymbolObfuscate(throw_method_name)));
  ASSERT(!throw_new.IsNull());

  Fragment instructions;

  // Call LateError._throwFoo.
  instructions += Constant(name);
  instructions +=
      StaticCall(TokenPosition::Synthetic(position.Pos()), throw_new,
                 /* argument_count = */ 1, ICData::kStatic);
  instructions += Drop();

  return instructions;
}

Fragment FlowGraphBuilder::StoreLateField(const Field& field,
                                          LocalVariable* instance,
                                          LocalVariable* setter_value) {
  Fragment instructions;
  TargetEntryInstr* is_uninitialized;
  TargetEntryInstr* is_initialized;
  const TokenPosition position = field.token_pos();
  const bool is_static = field.is_static();
  const bool is_final = field.is_final();

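  // Illustrative Dart example (not code from this file): for a late final
  // field, a second store must throw at run time:
  //
  //   class C {
  //     late final int x;
  //   }
  //   // c.x = 1;  // ok
  //   // c.x = 2;  // throws LateInitializationError
  //
  // The sentinel check below implements that semantics.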
  if (is_final) {
    // Check whether the field has been initialized already.
    if (is_static) {
      instructions += LoadStaticField(field, /*calls_initializer=*/false);
    } else {
      instructions += LoadLocal(instance);
      instructions += LoadField(field, /*calls_initializer=*/false);
    }
    instructions += Constant(Object::sentinel());
    instructions += BranchIfStrictEqual(&is_uninitialized, &is_initialized);
    JoinEntryInstr* join = BuildJoinEntry();

    {
      // If the field isn't initialized, do nothing.
      Fragment initialize(is_uninitialized);
      initialize += Goto(join);
    }

    {
      // If the field is already initialized, throw a LateInitializationError.
      Fragment already_initialized(is_initialized);
      already_initialized += ThrowLateInitializationError(
          position, "_throwFieldAlreadyInitialized",
          String::ZoneHandle(Z, field.name()));
      already_initialized += Goto(join);
    }

    instructions = Fragment(instructions.entry, join);
  }

  if (!is_static) {
    instructions += LoadLocal(instance);
  }
  instructions += LoadLocal(setter_value);
  if (is_static) {
    instructions += StoreStaticField(position, field);
  } else {
    instructions += StoreFieldGuarded(field);
  }

  return instructions;
}

Fragment FlowGraphBuilder::NativeCall(const String& name,
                                      const Function& function) {
  InlineBailout("kernel::FlowGraphBuilder::NativeCall");
  // +1 for result placeholder.
  const intptr_t num_args =
      function.NumParameters() + (function.IsGeneric() ? 1 : 0) + 1;

  Fragment instructions;
  instructions += NullConstant();  // Placeholder for the result.

  InputsArray arguments = GetArguments(num_args);
  NativeCallInstr* call = new (Z) NativeCallInstr(
      name, function, FLAG_link_natives_lazily,
      InstructionSource(function.end_token_pos()), std::move(arguments));
  Push(call);
  instructions <<= call;
  return instructions;
}

Fragment FlowGraphBuilder::Return(TokenPosition position,
                                  bool omit_result_type_check) {
  Fragment instructions;
  const Function& function = parsed_function_->function();

  // Emit a type check of the return type in checked mode for all functions
  // and in strong mode for native functions.
  if (!omit_result_type_check && function.is_old_native()) {
    const AbstractType& return_type =
        AbstractType::Handle(Z, function.result_type());
    instructions += CheckAssignable(return_type, Symbols::FunctionResult());
  }

  if (NeedsDebugStepCheck(function, position)) {
    instructions += DebugStepCheck(position);
  }

  instructions += BaseFlowGraphBuilder::Return(position);

  return instructions;
}

Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      ICData::RebindRule rebind_rule) {
  return StaticCall(position, target, argument_count, Array::null_array(),
                    rebind_rule);
}

void FlowGraphBuilder::SetResultTypeForStaticCall(
    StaticCallInstr* call,
    const Function& target,
    intptr_t argument_count,
    const InferredTypeMetadata* result_type) {
  if (call->InitResultType(Z)) {
    ASSERT((result_type == nullptr) || (result_type->cid == kDynamicCid) ||
           (result_type->cid == call->result_cid()));
    return;
  }
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
}

Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      const Array& argument_names,
                                      ICData::RebindRule rebind_rule,
                                      const InferredTypeMetadata* result_type,
                                      intptr_t type_args_count,
                                      bool use_unchecked_entry) {
  Fragment instructions = RecordCoverage(position);
  const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  StaticCallInstr* call = new (Z) StaticCallInstr(
      InstructionSource(position), target, type_args_count, argument_names,
      std::move(arguments), ic_data_array_, GetNextDeoptId(), rebind_rule);
  SetResultTypeForStaticCall(call, target, argument_count, result_type);
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  Push(call);
  instructions <<= call;
  if (result_type != nullptr && result_type->IsConstant()) {
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
  }
  return instructions;
}

Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
                                                  Representation representation,
                                                  const Function& target,
                                                  intptr_t argument_count,
                                                  const Array& argument_names,
                                                  intptr_t type_args_count) {
  const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  CachableIdempotentCallInstr* call = new (Z) CachableIdempotentCallInstr(
      InstructionSource(position), representation, target, type_args_count,
      argument_names, std::move(arguments), GetNextDeoptId());
  Push(call);
  return Fragment(call);
}

Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
  Fragment instructions;
  instructions += StaticCall(
      position, CompilerState::Current().StringBaseInterpolateSingle(),
      /* argument_count = */ 1, ICData::kStatic);
  return instructions;
}

Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
  Fragment instructions;
  instructions +=
      StaticCall(position, CompilerState::Current().StringBaseInterpolate(),
                 /* argument_count = */ 1, ICData::kStatic);
  return instructions;
}

Fragment FlowGraphBuilder::ThrowTypeError() {
  const Class& klass =
      Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::TypeError()));
  ASSERT(!klass.IsNull());
  GrowableHandlePtrArray<const String> pieces(Z, 3);
  pieces.Add(Symbols::TypeError());
  pieces.Add(Symbols::Dot());
  pieces.Add(H.DartSymbolObfuscate("_create"));

  const Function& constructor = Function::ZoneHandle(
      Z, klass.LookupConstructorAllowPrivate(
             String::ZoneHandle(Z, Symbols::FromConcatAll(thread_, pieces))));
  ASSERT(!constructor.IsNull());

  const String& url = H.DartString(
      parsed_function_->function().ToLibNamePrefixedQualifiedCString(),
      Heap::kOld);

  Fragment instructions;

  // Create instance of _TypeError.
  instructions += AllocateObject(TokenPosition::kNoSource, klass, 0);
  LocalVariable* instance = MakeTemporary();

  // Call _TypeError._create constructor.
  instructions += LoadLocal(instance);                             // this
  instructions += Constant(url);                                   // url
  instructions += NullConstant();                                  // line
  instructions += IntConstant(0);                                  // column
  instructions += Constant(H.DartSymbolPlain("Malformed type."));  // message

  instructions += StaticCall(TokenPosition::kNoSource, constructor,
                             /* argument_count = */ 5, ICData::kStatic);
  instructions += Drop();

  // Throw the exception.
  instructions += ThrowException(TokenPosition::kNoSource);

  return instructions;
}

Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
                                                  const Function& target,
                                                  bool incompatible_arguments,
                                                  bool receiver_pushed) {
  const Class& owner = Class::Handle(Z, target.Owner());
  auto& receiver = Instance::ZoneHandle();
  InvocationMirror::Kind kind = InvocationMirror::Kind::kMethod;
  if (target.IsImplicitGetterFunction() || target.IsGetterFunction() ||
      target.IsRecordFieldGetter()) {
    kind = InvocationMirror::Kind::kGetter;
  } else if (target.IsImplicitSetterFunction() || target.IsSetterFunction()) {
    kind = InvocationMirror::Kind::kSetter;
  }
  InvocationMirror::Level level;
  if (owner.IsTopLevel()) {
    if (incompatible_arguments) {
      receiver = target.UserVisibleSignature();
    }
    level = InvocationMirror::Level::kTopLevel;
  } else {
    receiver = owner.RareType();
    if (target.kind() == UntaggedFunction::kConstructor) {
      level = InvocationMirror::Level::kConstructor;
    } else if (target.IsRecordFieldGetter()) {
      level = InvocationMirror::Level::kDynamic;
    } else {
      level = InvocationMirror::Level::kStatic;
    }
  }

  Fragment instructions;
  if (!receiver_pushed) {
    instructions += Constant(receiver);  // receiver
  }
  instructions +=
      ThrowNoSuchMethodError(position, String::ZoneHandle(Z, target.name()),
                             level, kind, /*receiver_pushed*/ true);
  return instructions;
}

Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
                                                  const String& selector,
                                                  InvocationMirror::Level level,
                                                  InvocationMirror::Kind kind,
                                                  bool receiver_pushed) {
  const Class& klass = Class::ZoneHandle(
      Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
  ASSERT(!klass.IsNull());
  const auto& error = klass.EnsureIsFinalized(H.thread());
  ASSERT(error == Error::null());
  const Function& throw_function = Function::ZoneHandle(
      Z, klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNew()));
  ASSERT(!throw_function.IsNull());

  Fragment instructions;
  if (!receiver_pushed) {
    instructions += NullConstant();  // receiver
  }
  instructions += Constant(selector);
  instructions += IntConstant(InvocationMirror::EncodeType(level, kind));
  instructions += IntConstant(0);  // type arguments length
  instructions += NullConstant();  // type arguments
  instructions += NullConstant();  // arguments
  instructions += NullConstant();  // argumentNames
  instructions += StaticCall(position, throw_function, /* argument_count = */ 7,
                             ICData::kNoRebind);
  return instructions;
}

LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
  LocalVariable* local = scopes_->locals.Lookup(kernel_offset);
  ASSERT(local != nullptr);
  ASSERT(local->kernel_offset() == kernel_offset);
  return local;
}

FlowGraph* FlowGraphBuilder::BuildGraph() {
  const Function& function = parsed_function_->function();

#ifdef DEBUG
  // Check that all functions that are explicitly marked as recognized with the
  // vm:recognized annotation are in fact recognized. The check can't be done on
  // function creation, since the recognized status isn't set until later.
  if ((function.IsRecognized() !=
       MethodRecognizer::IsMarkedAsRecognized(function)) &&
      !function.IsDynamicInvocationForwarder()) {
    if (function.IsRecognized()) {
      FATAL("Recognized method %s is not marked with the vm:recognized pragma.",
            function.ToQualifiedCString());
    } else {
      FATAL("Non-recognized method %s is marked with the vm:recognized pragma.",
            function.ToQualifiedCString());
    }
  }
#endif

  auto& kernel_data = TypedDataView::Handle(Z, function.KernelLibrary());
  intptr_t kernel_data_program_offset = function.KernelLibraryOffset();

  StreamingFlowGraphBuilder streaming_flow_graph_builder(
      this, kernel_data, kernel_data_program_offset);
  auto result = streaming_flow_graph_builder.BuildGraph();

  result->set_coverage_array(coverage_array());

  if (streaming_flow_graph_builder.num_ast_nodes() >
      FLAG_huge_method_cutoff_in_ast_nodes) {
    if (FLAG_print_huge_methods) {
      OS::PrintErr(
          "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
          "been disabled, and the compiler might run out of memory. "
          "Consider refactoring this code into smaller components.\n",
          function.QualifiedUserVisibleNameCString(),
          String::Handle(Z, Library::Handle(
                                Z, Class::Handle(Z, function.Owner()).library())
                                .url())
              .ToCString());
    }
    result->mark_huge_method();
  }

  return result;
}

Fragment FlowGraphBuilder::NativeFunctionBody(const Function& function,
                                              LocalVariable* first_parameter) {
  ASSERT(function.is_old_native());
  RELEASE_ASSERT(!function.IsClosureFunction());  // Not supported.

  Fragment body;
  String& name = String::ZoneHandle(Z, function.native_name());
  if (function.IsGeneric()) {
    body += LoadLocal(parsed_function_->RawTypeArgumentsVariable());
  }
  for (intptr_t i = 0; i < function.NumParameters(); ++i) {
    body += LoadLocal(parsed_function_->RawParameterVariable(i));
  }
  body += NativeCall(name, function);
  // We typecheck results of native calls for type safety.
  body +=
      Return(TokenPosition::kNoSource, /* omit_result_type_check = */ false);
  return body;
}

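// Whether elements of an array with class id [cid] can be loaded in unboxed
// form. SIMD element types additionally require unboxed SIMD support on the
// target; see the use in BuildGraphOfRecognizedMethod below.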
static bool CanUnboxElements(classid_t cid) {
  switch (RepresentationUtils::RepresentationOfArrayElement(cid)) {
    case kUnboxedInt32x4:
    case kUnboxedFloat32x4:
    case kUnboxedFloat64x2:
      return FlowGraphCompiler::SupportsUnboxedSimd128();
    default:
      return true;
  }
}

static const Function& TypedListGetNativeFunction(Thread* thread,
                                                  classid_t cid) {
  auto& state = thread->compiler_state();
  switch (RepresentationUtils::RepresentationOfArrayElement(cid)) {
    case kUnboxedFloat:
      return state.TypedListGetFloat32();
    case kUnboxedDouble:
      return state.TypedListGetFloat64();
    case kUnboxedInt32x4:
      return state.TypedListGetInt32x4();
    case kUnboxedFloat32x4:
      return state.TypedListGetFloat32x4();
    case kUnboxedFloat64x2:
      return state.TypedListGetFloat64x2();
    default:
      UNREACHABLE();
      return Object::null_function();
  }
}

#define LOAD_NATIVE_FIELD(V) \
  V(ByteDataViewLength, TypedDataBase_length) \
  V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
  V(ByteDataViewTypedData, TypedDataView_typed_data) \
  V(Finalizer_getCallback, Finalizer_callback) \
  V(FinalizerBase_getAllEntries, FinalizerBase_all_entries) \
  V(FinalizerBase_getDetachments, FinalizerBase_detachments) \
  V(FinalizerEntry_getDetach, FinalizerEntry_detach) \
  V(FinalizerEntry_getNext, FinalizerEntry_next) \
  V(FinalizerEntry_getToken, FinalizerEntry_token) \
  V(FinalizerEntry_getValue, FinalizerEntry_value) \
  V(NativeFinalizer_getCallback, NativeFinalizer_callback) \
  V(GrowableArrayLength, GrowableObjectArray_length) \
  V(ReceivePort_getSendPort, ReceivePort_send_port) \
  V(ReceivePort_getHandler, ReceivePort_handler) \
  V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data) \
  V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index) \
  V(LinkedHashBase_getData, LinkedHashBase_data) \
  V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys) \
  V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask) \
  V(LinkedHashBase_getIndex, LinkedHashBase_index) \
  V(LinkedHashBase_getUsedData, LinkedHashBase_used_data) \
  V(ObjectArrayLength, Array_length) \
  V(Record_shape, Record_shape) \
  V(SuspendState_getFunctionData, SuspendState_function_data) \
  V(SuspendState_getThenCallback, SuspendState_then_callback) \
  V(SuspendState_getErrorCallback, SuspendState_error_callback) \
  V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
  V(TypedDataViewTypedData, TypedDataView_typed_data) \
  V(TypedListBaseLength, TypedDataBase_length) \
  V(WeakProperty_getKey, WeakProperty_key) \
  V(WeakProperty_getValue, WeakProperty_value) \
  V(WeakReference_getTarget, WeakReference_target)

#define STORE_NATIVE_FIELD(V) \
  V(Finalizer_setCallback, Finalizer_callback) \
  V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
  V(FinalizerBase_setDetachments, FinalizerBase_detachments) \
  V(FinalizerEntry_setToken, FinalizerEntry_token) \
  V(NativeFinalizer_setCallback, NativeFinalizer_callback) \
  V(ReceivePort_setHandler, ReceivePort_handler) \
  V(LinkedHashBase_setData, LinkedHashBase_data) \
  V(LinkedHashBase_setIndex, LinkedHashBase_index) \
  V(SuspendState_setFunctionData, SuspendState_function_data) \
  V(SuspendState_setThenCallback, SuspendState_then_callback) \
  V(SuspendState_setErrorCallback, SuspendState_error_callback) \
  V(WeakProperty_setKey, WeakProperty_key) \
  V(WeakProperty_setValue, WeakProperty_value) \
  V(WeakReference_setTarget, WeakReference_target)

#define STORE_NATIVE_FIELD_NO_BARRIER(V) \
  V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys) \
  V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask) \
  V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)

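// The tables above map recognized native-field getters and setters to the
// Slot they access; they expand into case labels and graph-building code
// below via the CASE macro.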
bool FlowGraphBuilder::IsRecognizedMethodForFlowGraph(
    const Function& function) {
  const MethodRecognizer::Kind kind = function.recognized_kind();

  switch (kind) {
#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
  case MethodRecognizer::k##clazz##ArrayGetIndexed: \
    FALL_THROUGH; \
  case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
    FALL_THROUGH; \
  case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
    FALL_THROUGH;
    CLASS_LIST_TYPED_DATA(TYPED_DATA_GET_INDEXED_CASES)
#undef TYPED_DATA_GET_INDEXED_CASES
    case MethodRecognizer::kObjectArrayGetIndexed:
    case MethodRecognizer::kGrowableArrayGetIndexed:
    case MethodRecognizer::kRecord_fieldAt:
    case MethodRecognizer::kRecord_fieldNames:
    case MethodRecognizer::kRecord_numFields:
    case MethodRecognizer::kSuspendState_clone:
    case MethodRecognizer::kSuspendState_resume:
    case MethodRecognizer::kTypedList_GetInt8:
    case MethodRecognizer::kTypedList_SetInt8:
    case MethodRecognizer::kTypedList_GetUint8:
    case MethodRecognizer::kTypedList_SetUint8:
    case MethodRecognizer::kTypedList_GetInt16:
    case MethodRecognizer::kTypedList_SetInt16:
    case MethodRecognizer::kTypedList_GetUint16:
    case MethodRecognizer::kTypedList_SetUint16:
    case MethodRecognizer::kTypedList_GetInt32:
    case MethodRecognizer::kTypedList_SetInt32:
    case MethodRecognizer::kTypedList_GetUint32:
    case MethodRecognizer::kTypedList_SetUint32:
    case MethodRecognizer::kTypedList_GetInt64:
    case MethodRecognizer::kTypedList_SetInt64:
    case MethodRecognizer::kTypedList_GetUint64:
    case MethodRecognizer::kTypedList_SetUint64:
    case MethodRecognizer::kTypedList_GetFloat32:
    case MethodRecognizer::kTypedList_SetFloat32:
    case MethodRecognizer::kTypedList_GetFloat64:
    case MethodRecognizer::kTypedList_SetFloat64:
    case MethodRecognizer::kTypedList_GetInt32x4:
    case MethodRecognizer::kTypedList_SetInt32x4:
    case MethodRecognizer::kTypedList_GetFloat32x4:
    case MethodRecognizer::kTypedList_SetFloat32x4:
    case MethodRecognizer::kTypedList_GetFloat64x2:
    case MethodRecognizer::kTypedList_SetFloat64x2:
    case MethodRecognizer::kTypedData_memMove1:
    case MethodRecognizer::kTypedData_memMove2:
    case MethodRecognizer::kTypedData_memMove4:
    case MethodRecognizer::kTypedData_memMove8:
    case MethodRecognizer::kTypedData_memMove16:
    case MethodRecognizer::kTypedData_ByteDataView_factory:
    case MethodRecognizer::kTypedData_Int8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_Int16ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
    case MethodRecognizer::kTypedData_Int64ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
    case MethodRecognizer::kTypedData_Int8Array_factory:
    case MethodRecognizer::kTypedData_Uint8Array_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
    case MethodRecognizer::kTypedData_Int16Array_factory:
    case MethodRecognizer::kTypedData_Uint16Array_factory:
    case MethodRecognizer::kTypedData_Int32Array_factory:
    case MethodRecognizer::kTypedData_Uint32Array_factory:
    case MethodRecognizer::kTypedData_Int64Array_factory:
    case MethodRecognizer::kTypedData_Uint64Array_factory:
    case MethodRecognizer::kTypedData_Float32Array_factory:
    case MethodRecognizer::kTypedData_Float64Array_factory:
    case MethodRecognizer::kTypedData_Float32x4Array_factory:
    case MethodRecognizer::kTypedData_Int32x4Array_factory:
    case MethodRecognizer::kTypedData_Float64x2Array_factory:
    case MethodRecognizer::kMemCopy:
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer:
    case MethodRecognizer::kFfiNativeCallbackFunction:
    case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
    case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction:
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer:
    case MethodRecognizer::kFfiFromAddress:
    case MethodRecognizer::kFfiGetAddress:
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble:
    case MethodRecognizer::kGetNativeField:
    case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
    case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
    case MethodRecognizer::kFinalizerBase_setIsolate:
    case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
    case MethodRecognizer::kFinalizerEntry_allocate:
    case MethodRecognizer::kFinalizerEntry_getExternalSize:
    case MethodRecognizer::kCheckNotDeeplyImmutable:
    case MethodRecognizer::kObjectEquals:
    case MethodRecognizer::kStringBaseCodeUnitAt:
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
    case MethodRecognizer::kClassIDgetID:
    case MethodRecognizer::kGrowableArrayAllocateWithData:
    case MethodRecognizer::kGrowableArrayCapacity:
    case MethodRecognizer::kObjectArrayAllocate:
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
    case MethodRecognizer::kFfiAbi:
    case MethodRecognizer::kUtf8DecoderScan:
    case MethodRecognizer::kHas63BitSmis:
    case MethodRecognizer::kExtensionStreamHasListener:
    case MethodRecognizer::kSmi_hashCode:
    case MethodRecognizer::kMint_hashCode:
    case MethodRecognizer::kDouble_hashCode:
#define CASE(method, slot) case MethodRecognizer::k##method:
      LOAD_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD_NO_BARRIER(CASE)
#undef CASE
      return true;
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleMod:
    case MethodRecognizer::kDoubleRem:
    case MethodRecognizer::kDoubleRoundToDouble:
    case MethodRecognizer::kDoubleTruncateToDouble:
    case MethodRecognizer::kDoubleFloorToDouble:
    case MethodRecognizer::kDoubleCeilToDouble:
    case MethodRecognizer::kMathDoublePow:
    case MethodRecognizer::kMathSin:
    case MethodRecognizer::kMathCos:
    case MethodRecognizer::kMathTan:
    case MethodRecognizer::kMathAsin:
    case MethodRecognizer::kMathAcos:
    case MethodRecognizer::kMathAtan:
    case MethodRecognizer::kMathAtan2:
    case MethodRecognizer::kMathExp:
    case MethodRecognizer::kMathLog:
    case MethodRecognizer::kMathSqrt:
      return true;
    default:
      return false;
  }
}

bool FlowGraphBuilder::IsExpressionTempVarUsedInRecognizedMethodFlowGraph(
    const Function& function) {
  switch (function.recognized_kind()) {
    case MethodRecognizer::kStringBaseCodeUnitAt:
      return true;
    default:
      return false;
  }
}

FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
    const Function& function) {
  ASSERT(IsRecognizedMethodForFlowGraph(function));

  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);

  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);

  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);

  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(function.token_pos());

  if (function.IsDynamicInvocationForwarder()) {
    body += BuildDefaultTypeHandling(function);
    BuildTypeArgumentTypeChecks(
        TypeChecksToBuild::kCheckAllTypeParameterBounds, &body);
    BuildArgumentTypeChecks(&body, &body, nullptr);
  }

  const MethodRecognizer::Kind kind = function.recognized_kind();
  switch (kind) {
#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
  case MethodRecognizer::k##clazz##ArrayGetIndexed: \
    FALL_THROUGH; \
  case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
    FALL_THROUGH; \
  case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
    FALL_THROUGH;
    CLASS_LIST_TYPED_DATA(TYPED_DATA_GET_INDEXED_CASES)
#undef TYPED_DATA_GET_INDEXED_CASES
    case MethodRecognizer::kObjectArrayGetIndexed:
    case MethodRecognizer::kGrowableArrayGetIndexed: {
      ASSERT_EQUAL(function.NumParameters(), 2);
      intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
      const Representation elem_rep =
          RepresentationUtils::RepresentationOfArrayElement(array_cid);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::GetLengthFieldForArrayCid(array_cid));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += GenericCheckBound();
      LocalVariable* safe_index = MakeTemporary();
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      if (IsTypedDataBaseClassId(array_cid) && !CanUnboxElements(array_cid)) {
        const auto& native_function =
            TypedListGetNativeFunction(thread_, array_cid);
        body += LoadLocal(safe_index);
        body += UnboxTruncate(kUnboxedIntPtr);
        // Scale the index by the element size (assumed shift constant).
        body += IntConstant(Utils::ShiftForPowerOfTwo(
            compiler::target::Instance::ElementSizeFor(array_cid)));
        body += BinaryIntegerOp(Token::kSHL, kUnboxedIntPtr,
                                /*is_truncating=*/true);
        body += StaticCall(TokenPosition::kNoSource, native_function, 2,
                           ICData::kNoRebind);
      } else {
        if (kind == MethodRecognizer::kGrowableArrayGetIndexed) {
          body += LoadNativeField(Slot::GrowableObjectArray_data());
          array_cid = kArrayCid;
        } else if (IsExternalTypedDataClassId(array_cid)) {
          body += LoadNativeField(Slot::PointerBase_data(),
                                  InnerPointerAccess::kCannotBeInnerPointer);
        }
        body += LoadLocal(safe_index);
        body +=
            LoadIndexed(array_cid,
                        /*index_scale=*/
                        compiler::target::Instance::ElementSizeFor(array_cid),
                        /*index_unboxed=*/
                        GenericCheckBoundInstr::UseUnboxedRepresentation());
        if (elem_rep == kUnboxedFloat) {
          body += FloatToDouble();
        }
      }
      body += DropTempsPreserveTop(1);  // Drop [safe_index], keep result.
      break;
    }
    case MethodRecognizer::kRecord_fieldAt:
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += LoadIndexed(
          kRecordCid, /*index_scale*/ compiler::target::kCompressedWordSize);
      break;
    case MethodRecognizer::kRecord_fieldNames:
      body += LoadObjectStore();
      body += LoadNativeField(Slot::ObjectStore_record_field_names());
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::Record_shape());
      // Decode the field-names index from the shape (assumed RecordShape
      // shift/mask constants).
      body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexShift);
      body += SmiBinaryOp(Token::kSHR);
      body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexMask);
      body += SmiBinaryOp(Token::kBIT_AND);
      body += LoadIndexed(
          kArrayCid, /*index_scale=*/compiler::target::kCompressedWordSize);
      break;
    case MethodRecognizer::kRecord_numFields:
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::Record_shape());
      // Mask out the number of fields from the shape (assumed constant).
      body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
      body += SmiBinaryOp(Token::kBIT_AND);
      break;
    case MethodRecognizer::kSuspendState_clone: {
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += Call1ArgStub(TokenPosition::kNoSource,
                           Call1ArgStubInstr::StubId::kCloneSuspendState);
      break;
    }
    case MethodRecognizer::kSuspendState_resume: {
      const Code& resume_stub =
          Code::ZoneHandle(Z, IG->object_store()->resume_stub());
      body += NullConstant();
      body += TailCall(resume_stub);
      break;
    }
    case MethodRecognizer::kTypedList_GetInt8:
      body += BuildTypedListGet(function, kTypedDataInt8ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetInt8:
      body += BuildTypedListSet(function, kTypedDataInt8ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetUint8:
      body += BuildTypedListGet(function, kTypedDataUint8ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetUint8:
      body += BuildTypedListSet(function, kTypedDataUint8ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetInt16:
      body += BuildTypedListGet(function, kTypedDataInt16ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetInt16:
      body += BuildTypedListSet(function, kTypedDataInt16ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetUint16:
      body += BuildTypedListGet(function, kTypedDataUint16ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetUint16:
      body += BuildTypedListSet(function, kTypedDataUint16ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetInt32:
      body += BuildTypedListGet(function, kTypedDataInt32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetInt32:
      body += BuildTypedListSet(function, kTypedDataInt32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetUint32:
      body += BuildTypedListGet(function, kTypedDataUint32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetUint32:
      body += BuildTypedListSet(function, kTypedDataUint32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetInt64:
      body += BuildTypedListGet(function, kTypedDataInt64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetInt64:
      body += BuildTypedListSet(function, kTypedDataInt64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetUint64:
      body += BuildTypedListGet(function, kTypedDataUint64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetUint64:
      body += BuildTypedListSet(function, kTypedDataUint64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetFloat32:
      body += BuildTypedListGet(function, kTypedDataFloat32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetFloat32:
      body += BuildTypedListSet(function, kTypedDataFloat32ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetFloat64:
      body += BuildTypedListGet(function, kTypedDataFloat64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetFloat64:
      body += BuildTypedListSet(function, kTypedDataFloat64ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetInt32x4:
      body += BuildTypedListGet(function, kTypedDataInt32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetInt32x4:
      body += BuildTypedListSet(function, kTypedDataInt32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetFloat32x4:
      body += BuildTypedListGet(function, kTypedDataFloat32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetFloat32x4:
      body += BuildTypedListSet(function, kTypedDataFloat32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedList_GetFloat64x2:
      body += BuildTypedListGet(function, kTypedDataFloat64x2ArrayCid);
      break;
    case MethodRecognizer::kTypedList_SetFloat64x2:
      body += BuildTypedListSet(function, kTypedDataFloat64x2ArrayCid);
      break;
    case MethodRecognizer::kTypedData_memMove1:
      body += BuildTypedDataMemMove(function, kTypedDataInt8ArrayCid);
      break;
    case MethodRecognizer::kTypedData_memMove2:
      body += BuildTypedDataMemMove(function, kTypedDataInt16ArrayCid);
      break;
    case MethodRecognizer::kTypedData_memMove4:
      body += BuildTypedDataMemMove(function, kTypedDataInt32ArrayCid);
      break;
    case MethodRecognizer::kTypedData_memMove8:
      body += BuildTypedDataMemMove(function, kTypedDataInt64ArrayCid);
      break;
    case MethodRecognizer::kTypedData_memMove16:
      body += BuildTypedDataMemMove(function, kTypedDataInt32x4ArrayCid);
      break;
#define CASE(name) \
  case MethodRecognizer::kTypedData_##name##_factory: \
    body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
    break; \
  case MethodRecognizer::kTypedData_##name##View_factory: \
    body += BuildTypedDataViewFactoryConstructor(function, \
                                                 kTypedData##name##ViewCid); \
    break; \
  case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
    body += BuildTypedDataViewFactoryConstructor( \
        function, kUnmodifiableTypedData##name##ViewCid); \
    break;
      CLASS_LIST_TYPED_DATA(CASE)
#undef CASE
    case MethodRecognizer::kTypedData_ByteDataView_factory:
      body += BuildTypedDataViewFactoryConstructor(function, kByteDataViewCid);
      break;
    case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kUnmodifiableByteDataViewCid);
      break;
    case MethodRecognizer::kObjectEquals:
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += StrictCompare(Token::kEQ_STRICT);
      break;
    case MethodRecognizer::kStringBaseCodeUnitAt: {
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::String_length());
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += GenericCheckBound();
      LocalVariable* safe_index = MakeTemporary();

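      // Dispatch on the receiver's class id: one-byte strings are indexed
      // with 1-byte elements and two-byte strings with 2-byte elements; both
      // arms store into [result] and meet at [done].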
      JoinEntryInstr* done = BuildJoinEntry();
      LocalVariable* result = parsed_function_->expression_temp_var();
      TargetEntryInstr* one_byte_string;
      TargetEntryInstr* two_byte_string;
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadClassId();
      body += IntConstant(kOneByteStringCid);
      body += BranchIfEqual(&one_byte_string, &two_byte_string);

      body.current = one_byte_string;
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(safe_index);
      body += LoadIndexed(
          kOneByteStringCid,
          /*index_scale=*/1,
          /*index_unboxed=*/GenericCheckBoundInstr::UseUnboxedRepresentation());
      body += StoreLocal(TokenPosition::kNoSource, result);
      body += Drop();
      body += Goto(done);

      body.current = two_byte_string;
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(safe_index);
      body += LoadIndexed(
          kTwoByteStringCid,
          /*index_scale=*/2,
          /*index_unboxed=*/GenericCheckBoundInstr::UseUnboxedRepresentation());
      body += StoreLocal(TokenPosition::kNoSource, result);
      body += Drop();
      body += Goto(done);

      body.current = done;
      body += DropTemporary(&safe_index);
      body += LoadLocal(result);
    } break;
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::String_length());
      if (kind == MethodRecognizer::kStringBaseIsEmpty) {
        body += IntConstant(0);
        body += StrictCompare(Token::kEQ_STRICT);
      }
      break;
    case MethodRecognizer::kClassIDgetID:
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadClassId();
      break;
    case MethodRecognizer::kGrowableArrayAllocateWithData: {
      ASSERT(function.IsFactory());
      ASSERT_EQUAL(function.NumParameters(), 2);
      // Assumed lookup of the growable array class from the object store.
      const Class& cls = Class::ZoneHandle(
          Z, IG->object_store()->growable_object_array_class());
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += AllocateObject(TokenPosition::kNoSource, cls, 1);
      LocalVariable* object = MakeTemporary();
      body += LoadLocal(object);
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += StoreNativeField(Slot::GrowableObjectArray_data(),
                               StoreFieldInstr::Kind::kInitializing);
      body += LoadLocal(object);
      body += IntConstant(0);
      body += StoreNativeField(Slot::GrowableObjectArray_length(),
                               StoreFieldInstr::Kind::kInitializing);
      break;
    }
    case MethodRecognizer::kGrowableArrayCapacity:
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::GrowableObjectArray_data());
      body += LoadNativeField(Slot::Array_length());
      break;
    case MethodRecognizer::kObjectArrayAllocate:
      ASSERT(function.IsFactory() && (function.NumParameters() == 2));
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += CreateArray();
      break;
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
      ASSERT_EQUAL(function.NumParameters(), 5);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += LoadLocal(parsed_function_->RawParameterVariable(2));
      body += LoadLocal(parsed_function_->RawParameterVariable(3));
      body += LoadLocal(parsed_function_->RawParameterVariable(4));
      body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
                         /*unboxed_inputs=*/false,
                         /*can_overlap=*/false);
      body += NullConstant();
      break;
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      // Uses a store-release barrier so that other isolates will see the
      // contents of the index after seeing the index itself.
      // (Assumed store kind/barrier/memory-order arguments.)
      body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
                               StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
                               compiler::Assembler::kRelease);
      body += NullConstant();
      break;
    case MethodRecognizer::kUtf8DecoderScan:
      ASSERT_EQUAL(function.NumParameters(), 5);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));  // decoder
      body += LoadLocal(parsed_function_->RawParameterVariable(1));  // bytes
      body += LoadLocal(parsed_function_->RawParameterVariable(2));  // start
      body += UnboxTruncate(kUnboxedIntPtr);
      body += LoadLocal(parsed_function_->RawParameterVariable(3));  // end
      body += UnboxTruncate(kUnboxedIntPtr);
      body += LoadLocal(parsed_function_->RawParameterVariable(4));  // table
      body += Utf8Scan();
      body += Box(kUnboxedIntPtr);
      break;
1527 case MethodRecognizer::kMemCopy: {
1528 ASSERT_EQUAL(function.NumParameters(), 5);
1529 LocalVariable* arg_target = parsed_function_->RawParameterVariable(0);
1530 LocalVariable* arg_target_offset_in_bytes =
1531 parsed_function_->RawParameterVariable(1);
1532 LocalVariable* arg_source = parsed_function_->RawParameterVariable(2);
1533 LocalVariable* arg_source_offset_in_bytes =
1534 parsed_function_->RawParameterVariable(3);
1535 LocalVariable* arg_length_in_bytes =
1536 parsed_function_->RawParameterVariable(4);
1537 body += LoadLocal(arg_source);
1538 body += LoadLocal(arg_target);
1539 body += LoadLocal(arg_source_offset_in_bytes);
1540 body += UnboxTruncate(kUnboxedIntPtr);
1541 body += LoadLocal(arg_target_offset_in_bytes);
1542 body += UnboxTruncate(kUnboxedIntPtr);
1543 body += LoadLocal(arg_length_in_bytes);
1544 body += UnboxTruncate(kUnboxedIntPtr);
1545 body += MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1546 /*unboxed_inputs=*/true,
1547 /*can_overlap=*/true);
1548 body += NullConstant();
1549 } break;
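// A sketch (not VM code) of the semantics built above: since
// /*can_overlap=*/true is passed, the copy must behave like memmove, not
// memcpy.
//
//   #include <stdint.h>
//   #include <string.h>
//
//   void MemCopySketch(uint8_t* target, intptr_t target_offset_in_bytes,
//                      const uint8_t* source, intptr_t source_offset_in_bytes,
//                      intptr_t length_in_bytes) {
//     memmove(target + target_offset_in_bytes,
//             source + source_offset_in_bytes, length_in_bytes);
//   }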
1550 case MethodRecognizer::kFfiAbi:
1551 ASSERT_EQUAL(function.NumParameters(), 0);
1552 body += IntConstant(static_cast<int64_t>(compiler::ffi::TargetAbi()));
1553 break;
1554 case MethodRecognizer::kFfiNativeCallbackFunction:
1555 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1556 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction: {
1557 const auto& error = String::ZoneHandle(
1558 Z, Symbols::New(thread_,
1559 "This function should be handled on call site."));
1560 body += Constant(error);
1561 body += ThrowException(TokenPosition::kNoSource);
1562 break;
1563 }
1564 case MethodRecognizer::kFfiLoadInt8:
1565 case MethodRecognizer::kFfiLoadInt16:
1566 case MethodRecognizer::kFfiLoadInt32:
1567 case MethodRecognizer::kFfiLoadInt64:
1568 case MethodRecognizer::kFfiLoadUint8:
1569 case MethodRecognizer::kFfiLoadUint16:
1570 case MethodRecognizer::kFfiLoadUint32:
1571 case MethodRecognizer::kFfiLoadUint64:
1572 case MethodRecognizer::kFfiLoadFloat:
1573 case MethodRecognizer::kFfiLoadFloatUnaligned:
1574 case MethodRecognizer::kFfiLoadDouble:
1575 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1576 case MethodRecognizer::kFfiLoadPointer: {
1577 const classid_t ffi_type_arg_cid =
1578 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1579 const AlignmentType alignment =
1580 compiler::ffi::RecognizedMethodAlignment(kind);
1581 const classid_t typed_data_cid =
1582 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1583
1584 ASSERT_EQUAL(function.NumParameters(), 2);
1585 // Argument can be a TypedData for loads on struct fields.
1586 LocalVariable* arg_typed_data_base =
1587 parsed_function_->RawParameterVariable(0);
1588 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1589
1590 body += LoadLocal(arg_typed_data_base);
1592 body += LoadLocal(arg_offset);
1594 body += UnboxTruncate(kUnboxedIntPtr);
1595 body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
1596 /*index_unboxed=*/true, alignment);
1597 if (kind == MethodRecognizer::kFfiLoadPointer) {
1598 const auto& pointer_class =
1599 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1600 const auto& type_arguments = TypeArguments::ZoneHandle(
1601 Z, IG->object_store()->type_argument_never());
1602
1603 // We do not reify Pointer type arguments
1604 ASSERT(function.NumTypeParameters() == 1);
1605 LocalVariable* address = MakeTemporary();
1606 body += Constant(type_arguments);
1607 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1608 LocalVariable* pointer = MakeTemporary();
1609 body += LoadLocal(pointer);
1610 body += LoadLocal(address);
1613 body += ConvertUnboxedToUntagged();
1614 body += StoreNativeField(Slot::PointerBase_data(),
1615 InnerPointerAccess::kCannotBeInnerPointer,
1616 StoreFieldInstr::Kind::kInitializing);
1617 body += DropTempsPreserveTop(1); // Drop [address] keep [pointer].
1618 } else {
1619 // Avoid any unnecessary (and potentially deoptimizing) int
1620 // conversions by using the representation returned from LoadIndexed.
1621 body += Box(LoadIndexedInstr::ReturnRepresentation(typed_data_cid));
1622 }
1623 } break;
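// In effect (a sketch, not the VM's actual helper), a recognized FFI load is
// a raw, possibly unaligned read at base + offset, e.g. for Int16:
//
//   #include <stdint.h>
//   #include <string.h>
//
//   int16_t FfiLoadInt16Sketch(const uint8_t* base, intptr_t offset_in_bytes) {
//     int16_t value;
//     memcpy(&value, base + offset_in_bytes, sizeof(value));  // alignment-safe
//     return value;
//   }
//
// kFfiLoadPointer additionally wraps the loaded address in a freshly
// allocated Pointer object, as built above.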
1624 case MethodRecognizer::kFfiStoreInt8:
1625 case MethodRecognizer::kFfiStoreInt16:
1626 case MethodRecognizer::kFfiStoreInt32:
1627 case MethodRecognizer::kFfiStoreInt64:
1628 case MethodRecognizer::kFfiStoreUint8:
1629 case MethodRecognizer::kFfiStoreUint16:
1630 case MethodRecognizer::kFfiStoreUint32:
1631 case MethodRecognizer::kFfiStoreUint64:
1632 case MethodRecognizer::kFfiStoreFloat:
1633 case MethodRecognizer::kFfiStoreFloatUnaligned:
1634 case MethodRecognizer::kFfiStoreDouble:
1635 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1636 case MethodRecognizer::kFfiStorePointer: {
1637 const classid_t ffi_type_arg_cid =
1639 const AlignmentType alignment =
1642 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1643
1644 // Argument can be a TypedData for stores on struct fields.
1645 LocalVariable* arg_typed_data_base =
1646 parsed_function_->RawParameterVariable(0);
1647 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1648 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
1649
1650 ASSERT_EQUAL(function.NumParameters(), 3);
1651
1652 body += LoadLocal(arg_typed_data_base); // Pointer.
1654 body += LoadLocal(arg_offset);
1656 body += UnboxTruncate(kUnboxedIntPtr);
1657 body += LoadLocal(arg_value);
1659 if (kind == MethodRecognizer::kFfiStorePointer) {
1660 // This can only be Pointer, so it is safe to load the data field.
1661 body += LoadNativeField(Slot::PointerBase_data(),
1662 InnerPointerAccess::kCannotBeInnerPointer);
1663 body += ConvertUntaggedToUnboxed();
1666 } else {
1667 // Avoid any unnecessary (and potentially deoptimizing) int
1668 // conversions by using the representation consumed by StoreIndexed.
1669 body += UnboxTruncate(
1670 StoreIndexedInstr::ValueRepresentation(typed_data_cid));
1671 }
1672 body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
1673 /*index_unboxed=*/true, alignment);
1674 body += NullConstant();
1675 } break;
1676 case MethodRecognizer::kFfiFromAddress: {
1677 const auto& pointer_class =
1678 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1679 const auto& type_arguments = TypeArguments::ZoneHandle(
1680 Z, IG->object_store()->type_argument_never());
1681
1682 ASSERT(function.NumTypeParameters() == 1);
1683 ASSERT_EQUAL(function.NumParameters(), 1);
1684 body += Constant(type_arguments);
1685 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1686 body += LoadLocal(MakeTemporary()); // Duplicate Pointer.
1687 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Address.
1689 // Use the same representation as FfiGetAddress so that the conversions
1690 // in Pointer.fromAddress(address).address cancel out if the temporary
1691 // Pointer allocation is removed.
1692 body += UnboxTruncate(kUnboxedAddress);
1693 body += ConvertUnboxedToUntagged();
1694 body += StoreNativeField(Slot::PointerBase_data(),
1695 InnerPointerAccess::kCannotBeInnerPointer,
1696 StoreFieldInstr::Kind::kInitializing);
1697 } break;
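// The cancellation mentioned above, sketched as the IL that remains for
// Pointer.fromAddress(address).address once the temporary Pointer allocation
// is removed:
//
//   v0 = UnboxTruncate(address)         // from kFfiFromAddress
//   v1 = ConvertUnboxedToUntagged(v0)
//   v2 = ConvertUntaggedToUnboxed(v1)   // from kFfiGetAddress
//   v3 = Box(v2)                        // each pair folds away, leaving
//                                       // `address` itself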
1698 case MethodRecognizer::kFfiGetAddress: {
1699 ASSERT_EQUAL(function.NumParameters(), 1);
1700 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Pointer.
1702 // This can only be Pointer, so it is safe to load the data field.
1703 body += LoadNativeField(Slot::PointerBase_data(),
1704 InnerPointerAccess::kCannotBeInnerPointer);
1705 body += ConvertUntaggedToUnboxed();
1706 body += Box(kUnboxedAddress);
1707 } break;
1708 case MethodRecognizer::kHas63BitSmis: {
1709#if defined(HAS_SMI_63_BITS)
1710 body += Constant(Bool::True());
1711#else
1712 body += Constant(Bool::False());
1713#endif // defined(HAS_SMI_63_BITS)
1714 } break;
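// Background (assumed, per the VM's usual tagging scheme): a Smi is an
// immediate with the low bit as tag, so its payload is one bit narrower than
// the word: 63 bits on uncompressed 64-bit targets, 31 bits otherwise.
//
//   #include <stdint.h>
//
//   constexpr intptr_t SmiTag(intptr_t value) { return value << 1; }
//   constexpr intptr_t SmiUntag(intptr_t raw) { return raw >> 1; }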
1715 case MethodRecognizer::kExtensionStreamHasListener: {
1716#ifdef PRODUCT
1717 body += Constant(Bool::False());
1718#else
1719 body += LoadServiceExtensionStream();
1720 body += LoadNativeField(Slot::StreamInfo_enabled());
1721 // StreamInfo::enabled_ is a std::atomic<intptr_t>. This is effectively
1722 // relaxed order access, which is acceptable for this use case.
1723 body += IntToBool();
1724#endif // PRODUCT
1725 } break;
1726 case MethodRecognizer::kSmi_hashCode: {
1727 // TODO(dartbug.com/38985): We should make this LoadLocal+Unbox+
1728 // IntegerHash+Box. Though this would make use of unboxed values on the
1729 // stack, which isn't allowed in unoptimized mode.
1730 // Once force-optimized functions can be inlined, we should change this
1731 // code to the above.
1732 ASSERT_EQUAL(function.NumParameters(), 1);
1733 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1734 body += BuildIntegerHashCode(/*smi=*/true);
1735 } break;
1736 case MethodRecognizer::kMint_hashCode: {
1737 ASSERT_EQUAL(function.NumParameters(), 1);
1738 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1739 body += BuildIntegerHashCode(/*smi=*/false);
1740 } break;
1741 case MethodRecognizer::kDouble_hashCode: {
1742 ASSERT_EQUAL(function.NumParameters(), 1);
1743 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1744 body += UnboxTruncate(kUnboxedDouble);
1745 body += BuildDoubleHashCode();
1746 body += Box(kUnboxedInt64);
1747 } break;
1748 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1749 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1750 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1751 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1752 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1753 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1754 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1755 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1756 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1757 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1758 const classid_t ffi_type_arg_cid =
1759 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1760 const classid_t external_typed_data_cid =
1761 compiler::ffi::ElementExternalTypedDataCid(ffi_type_arg_cid);
1762
1763 auto class_table = thread_->isolate_group()->class_table();
1764 ASSERT(class_table->HasValidClassAt(external_typed_data_cid));
1765 const auto& typed_data_class =
1766 Class::ZoneHandle(H.zone(), class_table->At(external_typed_data_cid));
1767
1768 // We assume that the caller has checked that the arguments are non-null
1769 // and length is in the range [0, kSmiMax/elementSize].
1770 ASSERT_EQUAL(function.NumParameters(), 2);
1771 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
1772 LocalVariable* arg_length = parsed_function_->RawParameterVariable(1);
1773
1774 body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1775 LocalVariable* typed_data_object = MakeTemporary();
1776
1777 // Initialize the result's length field.
1778 body += LoadLocal(typed_data_object);
1779 body += LoadLocal(arg_length);
1780 body += StoreNativeField(Slot::TypedDataBase_length(),
1781 StoreFieldInstr::Kind::kInitializing,
1782 kNoStoreBarrier);
1783
1784 // Initialize the result's data pointer field.
1785 body += LoadLocal(typed_data_object);
1786 body += LoadLocal(arg_pointer);
1787 body += LoadNativeField(Slot::PointerBase_data(),
1788 InnerPointerAccess::kCannotBeInnerPointer);
1789 body += StoreNativeField(Slot::PointerBase_data(),
1790 InnerPointerAccess::kCannotBeInnerPointer,
1791 StoreFieldInstr::Kind::kInitializing);
1792 } break;
1793 case MethodRecognizer::kGetNativeField: {
1794 auto& name = String::ZoneHandle(Z, function.name());
1795 // Note: This method is force-optimized, so we can push untagged values, etc.
1796 // Load TypedDataArray from Instance Handle implementing
1797 // NativeFieldWrapper.
1798 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Object.
1799 body += CheckNullOptimized(name);
1800 body += LoadNativeField(Slot::Instance_native_fields_array()); // Fields.
1801 body += CheckNullOptimized(name);
1802 // Load the native field at index.
1803 body += IntConstant(0); // Index.
1804 body += LoadIndexed(kIntPtrCid);
1805 body += Box(kUnboxedIntPtr);
1806 } break;
1807 case MethodRecognizer::kDoubleToInteger: {
1808 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1809 body += DoubleToInteger(kind);
1810 } break;
1811 case MethodRecognizer::kDoubleMod:
1812 case MethodRecognizer::kDoubleRem:
1813 case MethodRecognizer::kDoubleRoundToDouble:
1814 case MethodRecognizer::kDoubleTruncateToDouble:
1815 case MethodRecognizer::kDoubleFloorToDouble:
1816 case MethodRecognizer::kDoubleCeilToDouble:
1817 case MethodRecognizer::kMathDoublePow:
1818 case MethodRecognizer::kMathSin:
1819 case MethodRecognizer::kMathCos:
1820 case MethodRecognizer::kMathTan:
1821 case MethodRecognizer::kMathAsin:
1822 case MethodRecognizer::kMathAcos:
1823 case MethodRecognizer::kMathAtan:
1824 case MethodRecognizer::kMathAtan2:
1825 case MethodRecognizer::kMathExp:
1826 case MethodRecognizer::kMathLog: {
1827 for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
1828 body += LoadLocal(parsed_function_->RawParameterVariable(i));
1829 }
1830 body += InvokeMathCFunction(kind, function.NumParameters());
1831 } break;
1832 case MethodRecognizer::kMathSqrt: {
1833 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1834 body += UnaryDoubleOp(Token::kSQRT);
1835 } break;
1836 case MethodRecognizer::kFinalizerBase_setIsolate:
1837 ASSERT_EQUAL(function.NumParameters(), 1);
1838 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1839 body += LoadIsolate();
1840 body += StoreNativeField(Slot::FinalizerBase_isolate(),
1841 InnerPointerAccess::kCannotBeInnerPointer);
1842 body += NullConstant();
1843 break;
1844 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1845 ASSERT_EQUAL(function.NumParameters(), 0);
1846 body += LoadIsolate();
1847 body += LoadNativeField(Slot::Isolate_finalizers());
1848 break;
1849 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1850 ASSERT_EQUAL(function.NumParameters(), 1);
1851 body += LoadIsolate();
1852 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1853 body += StoreNativeField(Slot::Isolate_finalizers());
1854 body += NullConstant();
1855 break;
1856 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1857 ASSERT_EQUAL(function.NumParameters(), 1);
1858 ASSERT(this->optimizing_);
1859 // This relies on being force-optimized to do an 'atomic' exchange w.r.t.
1860 // the GC.
1861 // As an alternative design we could introduce an ExchangeNativeFieldInstr
1862 // that uses the same machine code as std::atomic::exchange. Or we could
1863 // use a Native to do that in C.
1864 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1865 // No GC from here until StoreNativeField.
1866 body += LoadNativeField(Slot::FinalizerBase_entries_collected());
1867 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1868 body += NullConstant();
1869 body += StoreNativeField(Slot::FinalizerBase_entries_collected());
1870 break;
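// The std::atomic alternative mentioned above would look roughly like this
// (a sketch; the VM instead relies on this force-optimized body having no
// safepoint between the load and the store):
//
//   #include <atomic>
//
//   void* ExchangeWithNull(std::atomic<void*>& entries_collected) {
//     return entries_collected.exchange(nullptr, std::memory_order_acq_rel);
//   }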
1871 case MethodRecognizer::kFinalizerEntry_allocate: {
1872 // Object value, Object token, Object detach, FinalizerBase finalizer
1873 ASSERT_EQUAL(function.NumParameters(), 4);
1874
1875 const auto class_table = thread_->isolate_group()->class_table();
1876 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1877 const auto& finalizer_entry_class =
1878 Class::ZoneHandle(H.zone(), class_table->At(kFinalizerEntryCid));
1879
1880 body +=
1881 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1882 LocalVariable* const entry = MakeTemporary("entry");
1883 // No GC from here to the end.
1884 body += LoadLocal(entry);
1885 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1886 body += StoreNativeField(Slot::FinalizerEntry_value());
1887 body += LoadLocal(entry);
1888 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1889 body += StoreNativeField(Slot::FinalizerEntry_token());
1890 body += LoadLocal(entry);
1891 body += LoadLocal(parsed_function_->RawParameterVariable(2));
1892 body += StoreNativeField(Slot::FinalizerEntry_detach());
1893 body += LoadLocal(entry);
1894 body += LoadLocal(parsed_function_->RawParameterVariable(3));
1895 body += StoreNativeField(Slot::FinalizerEntry_finalizer());
1896 body += LoadLocal(entry);
1897 body += UnboxedIntConstant(0, kUnboxedIntPtr);
1898 body += StoreNativeField(Slot::FinalizerEntry_external_size());
1899 break;
1900 }
1901 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1902 ASSERT_EQUAL(function.NumParameters(), 1);
1903 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1904 body += LoadNativeField(Slot::FinalizerEntry_external_size());
1905 body += Box(kUnboxedInt64);
1906 break;
1907 case MethodRecognizer::kCheckNotDeeplyImmutable:
1908 ASSERT_EQUAL(function.NumParameters(), 1);
1909 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1910 body += CheckNotDeeplyImmutable(
1911 CheckWritableInstr::kDeeplyImmutableAttachNativeFinalizer);
1912 body += NullConstant();
1913 break;
1914#define IL_BODY(method, slot) \
1915 case MethodRecognizer::k##method: \
1916 ASSERT_EQUAL(function.NumParameters(), 1); \
1917 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1918 body += LoadNativeField(Slot::slot()); \
1919 break;
1920 LOAD_NATIVE_FIELD(IL_BODY)
1921#undef IL_BODY
1922#define IL_BODY(method, slot) \
1923 case MethodRecognizer::k##method: \
1924 ASSERT_EQUAL(function.NumParameters(), 2); \
1925 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1926 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1927 body += StoreNativeField(Slot::slot()); \
1928 body += NullConstant(); \
1929 break;
1930 STORE_NATIVE_FIELD(IL_BODY)
1931#undef IL_BODY
1932#define IL_BODY(method, slot) \
1933 case MethodRecognizer::k##method: \
1934 ASSERT_EQUAL(function.NumParameters(), 2); \
1935 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1936 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1937 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1938 kNoStoreBarrier); \
1939 body += NullConstant(); \
1940 break;
1941 STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
1942#undef IL_BODY
1943 default: {
1944 UNREACHABLE();
1945 break;
1946 }
1947 }
1948
1949 if (body.is_open()) {
1950 body +=
1951 Return(TokenPosition::kNoSource, /* omit_result_type_check = */ true);
1952 }
1953
1954 return new (Z)
1955 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
1956 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
1957}
1958
1959Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
1960 const Function& function,
1961 classid_t cid) {
1962 auto token_pos = function.token_pos();
1963 auto class_table = Thread::Current()->isolate_group()->class_table();
1964
1965 ASSERT(class_table->HasValidClassAt(cid));
1966 const auto& view_class = Class::ZoneHandle(H.zone(), class_table->At(cid));
1967
1968 ASSERT(function.IsFactory() && (function.NumParameters() == 4));
1969 LocalVariable* typed_data = parsed_function_->RawParameterVariable(1);
1970 LocalVariable* offset_in_bytes = parsed_function_->RawParameterVariable(2);
1971 LocalVariable* length = parsed_function_->RawParameterVariable(3);
1972
1973 Fragment body;
1974
1975 // Note that we do no input checking here before allocation. The factory is
1976 // private, and only called by other code in the library implementation.
1977 // Thus, either the inputs are checked within Dart code before the factory is
1978 // called (e.g., the implementation of XList.sublistView), or the inputs to
1979 // the factory are retrieved from previously constructed TypedData objects
1980 // and thus already checked (e.g., the implementation of the
1981 // UnmodifiableXListView constructors).
1982
1983 body += AllocateObject(token_pos, view_class, /*arg_count=*/0);
1984 LocalVariable* view_object = MakeTemporary();
1985
1986 body += LoadLocal(view_object);
1987 body += LoadLocal(typed_data);
1988 body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
1989 StoreFieldInstr::Kind::kInitializing);
1990
1991 body += LoadLocal(view_object);
1992 body += LoadLocal(offset_in_bytes);
1993 body +=
1994 StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
1995 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
1996
1997 body += LoadLocal(view_object);
1998 body += LoadLocal(length);
1999 body +=
2000 StoreNativeField(token_pos, Slot::TypedDataBase_length(),
2001 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2002
2003 // First unbox the offset in bytes prior to the unsafe untagged load to avoid
2004 // any boxes being inserted between the load and its use. While any such box
2005 // is eventually canonicalized away, the FlowGraphChecker runs after every
2006 // pass in DEBUG mode and may see the box before canonicalization happens.
2007 body += LoadLocal(offset_in_bytes);
2008 body += UnboxTruncate(kUnboxedIntPtr);
2009 LocalVariable* unboxed_offset_in_bytes =
2010 MakeTemporary("unboxed_offset_in_bytes");
2011 // Now update the inner pointer.
2012 //
2013 // WARNING: Notice that we assume here no GC happens between the
2014 // LoadNativeField and the StoreNativeField, as the GC expects a properly
2015 // updated data field (see ScavengerVisitorBase::VisitTypedDataViewPointers).
2016 body += LoadLocal(view_object);
2017 body += LoadLocal(typed_data);
2018 body += LoadNativeField(Slot::PointerBase_data(),
2019 InnerPointerAccess::kMayBeInnerPointer);
2020 body += UnboxedIntConstant(0, kUnboxedIntPtr);
2021 body += LoadLocal(unboxed_offset_in_bytes);
2022 body += CalculateElementAddress(/*index_scale=*/1);
2023 body += StoreNativeField(Slot::PointerBase_data(),
2024 InnerPointerAccess::kMayBeInnerPointer,
2025 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2026 body += DropTemporary(&unboxed_offset_in_bytes);
2027
2028 return body;
2029}
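// Net effect of the factory above (a sketch; field names abbreviated):
//
//   view->typed_data      = typed_data;
//   view->offset_in_bytes = offset_in_bytes;
//   view->length          = length;
//   view->data            = typed_data->data + offset_in_bytes;  // inner
//                           // pointer; no GC between the load and the store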
2030
2031Fragment FlowGraphBuilder::BuildTypedListGet(const Function& function,
2032 classid_t cid) {
2033 const intptr_t kNumParameters = 2;
2034 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2035 // Guaranteed to be non-null since it's only called internally from other
2036 // instance methods.
2037 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2038 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2039 LocalVariable* arg_offset_in_bytes =
2040 parsed_function_->RawParameterVariable(1);
2041
2042 Fragment body;
2043 if (CanUnboxElements(cid)) {
2044 body += LoadLocal(arg_receiver);
2045 body += LoadLocal(arg_offset_in_bytes);
2046 body += LoadIndexed(cid, /*index_scale=*/1,
2047 /*index_unboxed=*/false, kUnalignedAccess);
2048 body += Box(LoadIndexedInstr::ReturnRepresentation(cid));
2049 } else {
2050 const auto& native_function = TypedListGetNativeFunction(thread_, cid);
2051 body += LoadLocal(arg_receiver);
2052 body += LoadLocal(arg_offset_in_bytes);
2053 body += StaticCall(TokenPosition::kNoSource, native_function,
2054 kNumParameters, ICData::kNoRebind);
2055 }
2056 return body;
2057}
2058
2059 static const Function& TypedListSetNativeFunction(Thread* thread,
2060 classid_t cid) {
2061 auto& state = thread->compiler_state();
2062 switch (StoreIndexedInstr::ValueRepresentation(cid)) {
2063 case kUnboxedFloat:
2064 return state.TypedListSetFloat32();
2065 case kUnboxedDouble:
2066 return state.TypedListSetFloat64();
2067 case kUnboxedInt32x4:
2068 return state.TypedListSetInt32x4();
2069 case kUnboxedFloat32x4:
2070 return state.TypedListSetFloat32x4();
2071 case kUnboxedFloat64x2:
2072 return state.TypedListSetFloat64x2();
2073 default:
2074 UNREACHABLE();
2075 return Object::null_function();
2076 }
2077}
2078
2079Fragment FlowGraphBuilder::BuildTypedListSet(const Function& function,
2080 classid_t cid) {
2081 const intptr_t kNumParameters = 3;
2082 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2083 // Guaranteed to be non-null since it's only called internally from other
2084 // instance methods.
2085 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2086 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2087 LocalVariable* arg_offset_in_bytes =
2088 parsed_function_->RawParameterVariable(1);
2089 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
2090
2091 Fragment body;
2092 if (CanUnboxElements(cid)) {
2093 body += LoadLocal(arg_receiver);
2094 body += LoadLocal(arg_offset_in_bytes);
2095 body += LoadLocal(arg_value);
2096 body +=
2097 CheckNullOptimized(Symbols::Value(), CheckNullInstr::kArgumentError);
2098 body += UnboxTruncate(StoreIndexedInstr::ValueRepresentation(cid));
2099 body += StoreIndexedTypedData(cid, /*index_scale=*/1,
2100 /*index_unboxed=*/false, kUnalignedAccess);
2101 body += NullConstant();
2102 } else {
2103 const auto& native_function = TypedListSetNativeFunction(thread_, cid);
2104 body += LoadLocal(arg_receiver);
2105 body += LoadLocal(arg_offset_in_bytes);
2106 body += LoadLocal(arg_value);
2107 body += StaticCall(TokenPosition::kNoSource, native_function,
2108 kNumParameters, ICData::kNoRebind);
2109 }
2110 return body;
2111}
2112
2113Fragment FlowGraphBuilder::BuildTypedDataMemMove(const Function& function,
2114 classid_t cid) {
2115 ASSERT_EQUAL(parsed_function_->function().NumParameters(), 5);
2116 LocalVariable* arg_to = parsed_function_->RawParameterVariable(0);
2117 LocalVariable* arg_to_start = parsed_function_->RawParameterVariable(1);
2118 LocalVariable* arg_count = parsed_function_->RawParameterVariable(2);
2119 LocalVariable* arg_from = parsed_function_->RawParameterVariable(3);
2120 LocalVariable* arg_from_start = parsed_function_->RawParameterVariable(4);
2121
2122 Fragment body;
2123 // If we're copying at least this many elements, calling memmove via CCall
2124 // is faster than using the code currently emitted by MemoryCopy.
2125#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2126 // On X86, the break-even point for using CCall instead of generating a loop
2127 // via MemoryCopy() is around the same as the largest benchmark (1048576
2128 // elements) on the machines we use.
2129 const intptr_t kCopyLengthForCCall = 1024 * 1024;
2130#else
2131 // On other architectures, when the element size is less than a word,
2132 // we copy in word-sized chunks when possible to get back some speed without
2133 // increasing the number of emitted instructions for MemoryCopy too much, but
2134 // memmove is even more aggressive, copying in 64-byte chunks when possible.
2135 // Thus, the break-even point for a call to memmove being faster is much
2136 // lower for our benchmarks than for X86.
2137 const intptr_t kCopyLengthForCCall = 1024;
2138#endif
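// In pseudo-C, the split implemented below (a sketch):
//
//   if (count < kCopyLengthForCCall) {
//     MemoryCopy(to, from, ...);  // inline IL copy loop
//   } else {
//     memmove(to_data + to_start * element_size,
//             from_data + from_start * element_size,
//             count * element_size);  // leaf runtime call
//   }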
2139
2140 JoinEntryInstr* done = BuildJoinEntry();
2141 TargetEntryInstr *is_small_enough, *is_too_large;
2142 body += LoadLocal(arg_count);
2143 body += IntConstant(kCopyLengthForCCall);
2144 body += SmiRelationalOp(Token::kLT);
2145 body += BranchIfTrue(&is_small_enough, &is_too_large);
2146
2147 Fragment use_instruction(is_small_enough);
2148 use_instruction += LoadLocal(arg_from);
2149 use_instruction += LoadLocal(arg_to);
2150 use_instruction += LoadLocal(arg_from_start);
2151 use_instruction += LoadLocal(arg_to_start);
2152 use_instruction += LoadLocal(arg_count);
2153 use_instruction += MemoryCopy(cid, cid,
2154 /*unboxed_inputs=*/false, /*can_overlap=*/true);
2155 use_instruction += Goto(done);
2156
2157 Fragment call_memmove(is_too_large);
2158 const intptr_t element_size = Instance::ElementSizeFor(cid);
2159 auto* const arg_reps =
2160 new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
2161 // First unbox the arguments to avoid any boxes being inserted between unsafe
2162 // untagged loads and their uses. Also adjust the length to be in bytes, since
2163 // that's what memmove expects.
2164 call_memmove += LoadLocal(arg_to_start);
2165 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2166 LocalVariable* to_start_unboxed = MakeTemporary("to_start_unboxed");
2167 call_memmove += LoadLocal(arg_from_start);
2168 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2169 LocalVariable* from_start_unboxed = MakeTemporary("from_start_unboxed");
2170 // Used for length in bytes calculations, since memmove expects a size_t.
2171 const Representation size_rep = kUnboxedUword;
2172 call_memmove += LoadLocal(arg_count);
2173 call_memmove += UnboxTruncate(size_rep);
2174 call_memmove += UnboxedIntConstant(element_size, size_rep);
2175 call_memmove +=
2176 BinaryIntegerOp(Token::kMUL, size_rep, /*is_truncating=*/true);
2177 LocalVariable* length_in_bytes = MakeTemporary("length_in_bytes");
2178 // dest: void*
2179 call_memmove += LoadLocal(arg_to);
2180 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2181 InnerPointerAccess::kMayBeInnerPointer);
2182 call_memmove += LoadLocal(to_start_unboxed);
2183 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2184 call_memmove += CalculateElementAddress(element_size);
2185 arg_reps->Add(kUntagged);
2186 // src: const void*
2187 call_memmove += LoadLocal(arg_from);
2188 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2189 InnerPointerAccess::kMayBeInnerPointer);
2190 call_memmove += LoadLocal(from_start_unboxed);
2191 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2192 call_memmove += CalculateElementAddress(element_size);
2193 arg_reps->Add(kUntagged);
2194 // n: size_t
2195 call_memmove += LoadLocal(length_in_bytes);
2196 arg_reps->Add(size_rep);
2197 // memmove(dest, src, n)
2198 call_memmove +=
2199 CallLeafRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
2200 // The returned address is unused.
2201 call_memmove += Drop();
2202 call_memmove += DropTemporary(&length_in_bytes);
2203 call_memmove += DropTemporary(&from_start_unboxed);
2204 call_memmove += DropTemporary(&to_start_unboxed);
2205 call_memmove += Goto(done);
2206
2207 body.current = done;
2208 body += NullConstant();
2209
2210 return body;
2211}
2212
2213Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
2214 const Function& function,
2215 classid_t cid) {
2216 const auto token_pos = function.token_pos();
2217 ASSERT(
2218 Thread::Current()->isolate_group()->class_table()->HasValidClassAt(cid));
2219
2220 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
2221 LocalVariable* length = parsed_function_->RawParameterVariable(1);
2222
2223 Fragment instructions;
2224 instructions += LoadLocal(length);
2225 // AllocateTypedData instruction checks that length is valid (a non-negative
2226 // Smi below maximum allowed length).
2227 instructions += AllocateTypedData(token_pos, cid);
2228 return instructions;
2229}
2230
2231Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
2232 TokenPosition position,
2233 const Function& target) {
2234 // The function cannot be local and have parent generic functions.
2235 ASSERT(!target.HasGenericParent());
2236 ASSERT(target.IsImplicitInstanceClosureFunction());
2237
2238 Fragment fragment;
2239 fragment += Constant(target);
2240 fragment += LoadLocal(parsed_function_->receiver_var());
2241 // The function signature can have uninstantiated class type parameters.
2242 const bool has_instantiator_type_args =
2243 !target.HasInstantiatedSignature(kCurrentClass);
2244 if (has_instantiator_type_args) {
2245 fragment += LoadInstantiatorTypeArguments();
2246 }
2247 fragment += AllocateClosure(position, has_instantiator_type_args,
2248 target.IsGeneric(), /*is_tear_off=*/true);
2249
2250 return fragment;
2251}
2252
2253Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
2254 const AbstractType& dst_type,
2255 const String& name_symbol) {
2256 return Fragment();
2257}
2258
2259bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
2260 TokenPosition position) {
2261 return position.IsDebugPause() && !function.is_native() &&
2262 function.is_debuggable();
2263}
2264
2265bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
2266 TokenPosition position) {
2267 if (!position.IsDebugPause()) {
2268 return false;
2269 }
2270 Definition* definition = value->definition();
2271 if (definition->IsConstant() || definition->IsLoadStaticField() ||
2272 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
2273 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
2274 return true;
2275 }
2276 if (auto const alloc = definition->AsAllocateClosure()) {
2277 return !alloc->known_function().IsNull();
2278 }
2279 return false;
2280}
2281
2282Fragment FlowGraphBuilder::EvaluateAssertion() {
2283 const Class& klass =
2284 Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::AssertionError()));
2285 ASSERT(!klass.IsNull());
2286 const auto& error = klass.EnsureIsFinalized(H.thread());
2287 ASSERT(error == Error::null());
2288 const Function& target = Function::ZoneHandle(
2289 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::EvaluateAssertion()));
2290 ASSERT(!target.IsNull());
2291 return StaticCall(TokenPosition::kNoSource, target, /* argument_count = */ 1,
2292 ICData::kStatic);
2293}
2294
2295Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
2296 Fragment instructions;
2297 LocalVariable* top_of_stack = MakeTemporary();
2298 instructions += LoadLocal(top_of_stack);
2299 instructions += AssertBool(position);
2300 instructions += Drop();
2301 return instructions;
2302}
2303
2304Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
2305 const String& dst_name,
2306 AssertAssignableInstr::Kind kind,
2307 TokenPosition token_pos) {
2308 Fragment instructions;
2309 if (!dst_type.IsTopTypeForSubtyping()) {
2310 LocalVariable* top_of_stack = MakeTemporary();
2311 instructions += LoadLocal(top_of_stack);
2312 instructions +=
2313 AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
2314 instructions += Drop();
2315 }
2316 return instructions;
2317}
2318
2319Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
2320 TokenPosition position,
2321 const AbstractType& dst_type,
2322 const String& dst_name,
2323 AssertAssignableInstr::Kind kind) {
2324 Fragment instructions;
2325
2326 instructions += Constant(AbstractType::ZoneHandle(dst_type.ptr()));
2327
2328 if (!dst_type.IsInstantiated(kCurrentClass)) {
2329 instructions += LoadInstantiatorTypeArguments();
2330 } else {
2331 instructions += NullConstant();
2332 }
2333
2334 if (!dst_type.IsInstantiated(kFunctions)) {
2335 instructions += LoadFunctionTypeArguments();
2336 } else {
2337 instructions += NullConstant();
2338 }
2339
2340 instructions += AssertAssignable(position, dst_name, kind);
2341
2342 return instructions;
2343}
2344
2345Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
2346 const AbstractType& sub_type_value,
2347 const AbstractType& super_type_value,
2348 const String& dst_name_value) {
2349 Fragment instructions;
2350 instructions += LoadInstantiatorTypeArguments();
2351 instructions += LoadFunctionTypeArguments();
2352 instructions += Constant(AbstractType::ZoneHandle(Z, sub_type_value.ptr()));
2353 instructions += Constant(AbstractType::ZoneHandle(Z, super_type_value.ptr()));
2354 instructions += Constant(String::ZoneHandle(Z, dst_name_value.ptr()));
2355 instructions += AssertSubtype(position);
2356 return instructions;
2357}
2358
2359Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
2360 Fragment instructions;
2361
2362 Value* dst_name = Pop();
2363 Value* super_type = Pop();
2364 Value* sub_type = Pop();
2365 Value* function_type_args = Pop();
2366 Value* instantiator_type_args = Pop();
2367
2368 AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
2369 InstructionSource(position), instantiator_type_args, function_type_args,
2370 sub_type, super_type, dst_name, GetNextDeoptId());
2371 instructions += Fragment(instr);
2372
2373 return instructions;
2374}
2375
2376void FlowGraphBuilder::BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
2377 Fragment* implicit_checks) {
2378 const Function& dart_function = parsed_function_->function();
2379
2380 const Function* forwarding_target = nullptr;
2381 if (parsed_function_->is_forwarding_stub()) {
2382 forwarding_target = parsed_function_->forwarding_stub_super_target();
2383 ASSERT(!forwarding_target->IsNull());
2384 }
2385
2386 TypeParameters& type_parameters = TypeParameters::Handle(Z);
2387 if (dart_function.IsFactory()) {
2388 type_parameters = Class::Handle(Z, dart_function.Owner()).type_parameters();
2389 } else {
2390 type_parameters = dart_function.type_parameters();
2391 }
2392 const intptr_t num_type_params = type_parameters.Length();
2393 if (num_type_params == 0) return;
2394 if (forwarding_target != nullptr) {
2395 type_parameters = forwarding_target->type_parameters();
2396 ASSERT(type_parameters.Length() == num_type_params);
2397 }
2398 if (type_parameters.AllDynamicBounds()) {
2399 return; // All bounds are dynamic.
2400 }
2401 TypeParameter& type_param = TypeParameter::Handle(Z);
2402 String& name = String::Handle(Z);
2403 AbstractType& bound = AbstractType::Handle(Z);
2404 Fragment check_bounds;
2405 for (intptr_t i = 0; i < num_type_params; ++i) {
2406 bound = type_parameters.BoundAt(i);
2407 if (bound.IsTopTypeForSubtyping()) {
2408 continue;
2409 }
2410
2411 switch (mode) {
2412 case kCheckAllTypeParameterBounds:
2413 break;
2414 case kCheckCovariantTypeParameterBounds:
2415 if (!type_parameters.IsGenericCovariantImplAt(i)) {
2416 continue;
2417 }
2418 break;
2419 case kCheckNonCovariantTypeParameterBounds:
2420 if (type_parameters.IsGenericCovariantImplAt(i)) {
2421 continue;
2422 }
2423 break;
2424 }
2425
2426 name = type_parameters.NameAt(i);
2427
2428 if (forwarding_target != nullptr) {
2429 type_param = forwarding_target->TypeParameterAt(i);
2430 } else if (dart_function.IsFactory()) {
2431 type_param = Class::Handle(Z, dart_function.Owner()).TypeParameterAt(i);
2432 } else {
2433 type_param = dart_function.TypeParameterAt(i);
2434 }
2435 ASSERT(type_param.IsFinalized());
2436 check_bounds +=
2437 AssertSubtype(TokenPosition::kNoSource, type_param, bound, name);
2438 }
2439
2440 // Type arguments passed through partial instantiation are guaranteed to be
2441 // bounds-checked at the point of partial instantiation, so we don't need to
2442 // check them again at the call-site.
2443 if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
2444 FLAG_eliminate_type_checks) {
2445 LocalVariable* closure = parsed_function_->ParameterVariable(0);
2446 *implicit_checks += TestDelayedTypeArgs(closure, /*present=*/{},
2447 /*absent=*/check_bounds);
2448 } else {
2449 *implicit_checks += check_bounds;
2450 }
2451}
2452
2453void FlowGraphBuilder::BuildArgumentTypeChecks(
2454 Fragment* explicit_checks,
2455 Fragment* implicit_checks,
2456 Fragment* implicit_redefinitions) {
2457 const Function& dart_function = parsed_function_->function();
2458
2459 const Function* forwarding_target = nullptr;
2460 if (parsed_function_->is_forwarding_stub()) {
2461 forwarding_target = parsed_function_->forwarding_stub_super_target();
2462 ASSERT(!forwarding_target->IsNull());
2463 }
2464
2465 const intptr_t num_params = dart_function.NumParameters();
2466 for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
2467 ++i) {
2468 LocalVariable* param = parsed_function_->ParameterVariable(i);
2469 const String& name = param->name();
2470 if (!param->needs_type_check()) {
2471 continue;
2472 }
2473 if (param->is_captured()) {
2474 param = parsed_function_->RawParameterVariable(i);
2475 }
2476
2477 const AbstractType* target_type = &param->static_type();
2478 if (forwarding_target != nullptr) {
2479 // We add 1 to the parameter index to account for the receiver.
2480 target_type =
2481 &AbstractType::ZoneHandle(Z, forwarding_target->ParameterTypeAt(i));
2482 }
2483
2484 if (target_type->IsTopTypeForSubtyping()) continue;
2485
2486 const bool is_covariant = param->is_explicit_covariant_parameter();
2487 Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
2488
2489 *checks += LoadLocal(param);
2490 *checks += AssertAssignableLoadTypeArguments(
2491 param->token_pos(), *target_type, name,
2492 AssertAssignableInstr::kParameterCheck);
2493 *checks += StoreLocal(param);
2494 *checks += Drop();
2495
2496 if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
2497 // We generate slightly different code in optimized vs. unoptimized mode,
2498 // which is ok since we don't allocate any deopt ids.
2499 AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
2500
2501 *implicit_redefinitions += LoadLocal(param);
2502 *implicit_redefinitions += RedefinitionWithType(*target_type);
2503 *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
2504 *implicit_redefinitions += Drop();
2505 }
2506 }
2507}
2508
2509BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2510 PrologueInfo* prologue_info) {
2511 const bool compiling_for_osr = IsCompiledForOsr();
2512
2513 kernel::PrologueBuilder prologue_builder(
2514 parsed_function_, last_used_block_id_, compiling_for_osr, IsInlining());
2515 BlockEntryInstr* instruction_cursor =
2516 prologue_builder.BuildPrologue(normal_entry, prologue_info);
2517
2518 last_used_block_id_ = prologue_builder.last_used_block_id();
2519
2520 return instruction_cursor;
2521}
2522
2523ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
2524 if (!function.HasOptionalNamedParameters()) {
2525 return Array::null();
2526 }
2527
2528 const intptr_t num_fixed_params = function.num_fixed_parameters();
2529 const intptr_t num_opt_params = function.NumOptionalNamedParameters();
2530 const auto& names = Array::Handle(Z, Array::New(num_opt_params, Heap::kOld));
2531 auto& name = String::Handle(Z);
2532 for (intptr_t i = 0; i < num_opt_params; ++i) {
2533 name = function.ParameterNameAt(num_fixed_params + i);
2534 names.SetAt(i, name);
2535 }
2536 return names.ptr();
2537}
2538
2539Fragment FlowGraphBuilder::PushExplicitParameters(
2540 const Function& function,
2541 const Function& target /* = Function::null_function()*/) {
2542 Fragment instructions;
2543 for (intptr_t i = function.NumImplicitParameters(),
2544 n = function.NumParameters();
2545 i < n; ++i) {
2546 Fragment push_param = LoadLocal(parsed_function_->ParameterVariable(i));
2547 if (!target.IsNull() && target.is_unboxed_parameter_at(i)) {
2548 Representation to;
2549 if (target.is_unboxed_integer_parameter_at(i)) {
2550 to = kUnboxedInt64;
2551 } else {
2552 ASSERT(target.is_unboxed_double_parameter_at(i));
2553 to = kUnboxedDouble;
2554 }
2555 const auto unbox = UnboxInstr::Create(to, Pop(), DeoptId::kNone,
2556 Instruction::kNotSpeculative);
2557 Push(unbox);
2558 push_param += Fragment(unbox);
2559 }
2560 instructions += push_param;
2561 }
2562 return instructions;
2563}
2564
2565FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
2566 const Function& method) {
2567 // A method extractor is the implicit getter for a method.
2568 const Function& function =
2569 Function::ZoneHandle(Z, method.extracted_method_closure());
2570
2571 graph_entry_ =
2572 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2573
2574 auto normal_entry = BuildFunctionEntry(graph_entry_);
2575 graph_entry_->set_normal_entry(normal_entry);
2576
2577 Fragment body(normal_entry);
2578 body += CheckStackOverflowInPrologue(method.token_pos());
2579 body += BuildImplicitClosureCreation(TokenPosition::kNoSource, function);
2580 body += Return(TokenPosition::kNoSource);
2581
2582 // There is no prologue code for a method extractor.
2583 PrologueInfo prologue_info(-1, -1);
2584 return new (Z)
2585 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2586 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2587}
2588
2589FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
2590 const Function& function) {
2591 // This function is specialized for a receiver class, a method name, and
2592 // the arguments descriptor at a call site.
2593 const ArgumentsDescriptor descriptor(saved_args_desc_array());
2594
2595 graph_entry_ =
2596 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2597
2598 auto normal_entry = BuildFunctionEntry(graph_entry_);
2599 graph_entry_->set_normal_entry(normal_entry);
2600
2601 PrologueInfo prologue_info(-1, -1);
2602 BlockEntryInstr* instruction_cursor =
2603 BuildPrologue(normal_entry, &prologue_info);
2604
2605 Fragment body(instruction_cursor);
2606 body += CheckStackOverflowInPrologue(function.token_pos());
2607
2608 // The receiver is the first argument to noSuchMethod, and it is the first
2609 // argument passed to the dispatcher function.
2610 body += LoadLocal(parsed_function_->ParameterVariable(0));
2611
2612 // The second argument to noSuchMethod is an invocation mirror. Push the
2613 // arguments for allocating the invocation mirror. First, the name.
2614 body += Constant(String::ZoneHandle(Z, function.name()));
2615
2616 // Second, the arguments descriptor.
2617 body += Constant(saved_args_desc_array());
2618
2619 // Third, an array containing the original arguments. Create it and fill
2620 // it in.
2621 const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
2622 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
2623 body += IntConstant(receiver_index + descriptor.Size());
2624 body += CreateArray();
2625 LocalVariable* array = MakeTemporary();
2626 if (receiver_index > 0) {
2627 LocalVariable* type_args = parsed_function_->function_type_arguments();
2628 ASSERT(type_args != nullptr);
2629 body += LoadLocal(array);
2630 body += IntConstant(0);
2631 body += LoadLocal(type_args);
2632 body += StoreIndexed(kArrayCid);
2633 }
2634 for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
2635 body += LoadLocal(array);
2636 body += IntConstant(receiver_index + i);
2637 body += LoadLocal(parsed_function_->ParameterVariable(i));
2638 body += StoreIndexed(kArrayCid);
2639 }
2640 String& name = String::Handle(Z);
2641 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
2642 const intptr_t parameter_index = descriptor.PositionAt(i);
2643 name = descriptor.NameAt(i);
2644 name = Symbols::New(H.thread(), name);
2645 body += LoadLocal(array);
2646 body += IntConstant(receiver_index + parameter_index);
2647 body += LoadLocal(parsed_function_->ParameterVariable(parameter_index));
2648 body += StoreIndexed(kArrayCid);
2649 }
2650
2651 // Fourth, false indicating this is not a super NoSuchMethod.
2652 body += Constant(Bool::False());
2653
2654 const Class& mirror_class =
2655 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
2656 ASSERT(!mirror_class.IsNull());
2657 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
2658 ASSERT(error == Error::null());
2659 const Function& allocation_function = Function::ZoneHandle(
2660 Z, mirror_class.LookupStaticFunction(
2661 Library::PrivateCoreLibName(Symbols::AllocateInvocationMirror())));
2662 ASSERT(!allocation_function.IsNull());
2663 body += StaticCall(TokenPosition::kMinSource, allocation_function,
2664 /* argument_count = */ 4, ICData::kStatic);
2665
2666 const int kTypeArgsLen = 0;
2667 ArgumentsDescriptor two_arguments(
2668 Array::Handle(Z, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, 2)));
2669 Function& no_such_method = Function::ZoneHandle(
2670 Z, Resolver::ResolveDynamicForReceiverClass(
2671 Class::Handle(Z, function.Owner()), Symbols::NoSuchMethod(),
2672 two_arguments, /*allow_add=*/true));
2673 if (no_such_method.IsNull()) {
2674 // If noSuchMethod is not found on the receiver class, call
2675 // Object.noSuchMethod.
2676 no_such_method = Resolver::ResolveDynamicForReceiverClass(
2677 Class::Handle(Z, IG->object_store()->object_class()),
2678 Symbols::NoSuchMethod(), two_arguments, /*allow_add=*/true);
2679 }
2680 body += StaticCall(TokenPosition::kMinSource, no_such_method,
2681 /* argument_count = */ 2, ICData::kNSMDispatch);
2682 body += Return(TokenPosition::kNoSource);
2683
2684 return new (Z)
2685 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2686 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2687}
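// Shape of the dispatch built above (a sketch): the unresolvable call is
// forwarded as
//
//   receiver.noSuchMethod(
//       _allocateInvocationMirror(name, argsDescriptor,
//                                 [typeArgs?, receiver, positional..., named...],
//                                 /*isSuperInvocation=*/false));
//
// where the type-argument vector occupies slot 0 of the array only when type
// arguments were passed at the call site.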
2688
2689FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2690 const Function& function) {
2691 graph_entry_ =
2692 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2693
2694 auto normal_entry = BuildFunctionEntry(graph_entry_);
2695 graph_entry_->set_normal_entry(normal_entry);
2696
2697 JoinEntryInstr* nsm = BuildJoinEntry();
2698 JoinEntryInstr* done = BuildJoinEntry();
2699
2700 Fragment body(normal_entry);
2701 body += CheckStackOverflowInPrologue(function.token_pos());
2702
2703 String& name = String::ZoneHandle(Z, function.name());
2704 ASSERT(Field::IsGetterName(name));
2705 name = Field::NameFromGetter(name);
2706
2707 // Get an array of field names.
2708 const Class& cls = Class::Handle(Z, IG->class_table()->At(kRecordCid));
2709 const auto& error = cls.EnsureIsFinalized(thread_);
2710 ASSERT(error == Error::null());
2711 const Function& get_field_names_function = Function::ZoneHandle(
2712 Z, cls.LookupFunctionAllowPrivate(Symbols::Get_fieldNames()));
2713 ASSERT(!get_field_names_function.IsNull());
2714 body += LoadLocal(parsed_function_->receiver_var());
2715 body += StaticCall(TokenPosition::kNoSource, get_field_names_function, 1,
2716 ICData::kNoRebind);
2717 LocalVariable* field_names = MakeTemporary("field_names");
2718
2719 body += LoadLocal(field_names);
2720 body += LoadNativeField(Slot::Array_length());
2721 LocalVariable* num_named = MakeTemporary("num_named");
2722
2723 // num_positional = num_fields - field_names.length
2724 body += LoadLocal(parsed_function_->receiver_var());
2725 body += LoadNativeField(Slot::Record_shape());
2726 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
2727 body += SmiBinaryOp(Token::kBIT_AND);
2728 body += LoadLocal(num_named);
2729 body += SmiBinaryOp(Token::kSUB);
2730 LocalVariable* num_positional = MakeTemporary("num_positional");
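 // I.e. (a sketch, using the mask loaded above):
 //
 //   num_fields     = shape & kNumFieldsMask;
 //   num_positional = num_fields - field_names.length;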
2731
2732 const intptr_t field_index =
2733 Record::GetPositionalFieldIndexFromFieldName(name);
2734 if (field_index >= 0) {
2735 // Get positional record field by index.
2736 body += IntConstant(field_index);
2737 body += LoadLocal(num_positional);
2738 body += SmiRelationalOp(Token::kLT);
2739 TargetEntryInstr* valid_index;
2740 TargetEntryInstr* invalid_index;
2741 body += BranchIfTrue(&valid_index, &invalid_index);
2742
2743 body.current = valid_index;
2744 body += LoadLocal(parsed_function_->receiver_var());
2745 body += LoadNativeField(Slot::GetRecordFieldSlot(
2746 thread_, compiler::target::Record::field_offset(field_index)));
2747
2748 body += StoreLocal(TokenPosition::kNoSource,
2749 parsed_function_->expression_temp_var());
2750 body += Drop();
2751 body += Goto(done);
2752
2753 body.current = invalid_index;
2754 }
2755
2756 // Search field among named fields.
2757 body += IntConstant(0);
2758 body += LoadLocal(num_named);
2759 body += SmiRelationalOp(Token::kLT);
2760 TargetEntryInstr* has_named_fields;
2761 TargetEntryInstr* no_named_fields;
2762 body += BranchIfTrue(&has_named_fields, &no_named_fields);
2763
2764 Fragment(no_named_fields) + Goto(nsm);
2765 body.current = has_named_fields;
2766
2767 LocalVariable* index = parsed_function_->expression_temp_var();
2768 body += IntConstant(0);
2769 body += StoreLocal(TokenPosition::kNoSource, index);
2770 body += Drop();
2771
2772 JoinEntryInstr* loop = BuildJoinEntry();
2773 body += Goto(loop);
2774 body.current = loop;
2775
2776 body += LoadLocal(field_names);
2777 body += LoadLocal(index);
2778 body += LoadIndexed(kArrayCid,
2779 /*index_scale*/ compiler::target::kCompressedWordSize);
2780 body += Constant(name);
2781 TargetEntryInstr* found;
2782 TargetEntryInstr* continue_search;
2783 body += BranchIfEqual(&found, &continue_search);
2784
2785 body.current = continue_search;
2786 body += LoadLocal(index);
2787 body += IntConstant(1);
2788 body += SmiBinaryOp(Token::kADD);
2789 body += StoreLocal(TokenPosition::kNoSource, index);
2790 body += Drop();
2791
2792 body += LoadLocal(index);
2793 body += LoadLocal(num_named);
2794 body += SmiRelationalOp(Token::kLT);
2795 TargetEntryInstr* has_more_fields;
2796 TargetEntryInstr* no_more_fields;
2797 body += BranchIfTrue(&has_more_fields, &no_more_fields);
2798
2799 Fragment(has_more_fields) + Goto(loop);
2800 Fragment(no_more_fields) + Goto(nsm);
2801
2802 body.current = found;
2803
2804 body += LoadLocal(parsed_function_->receiver_var());
2805
2806 body += LoadLocal(num_positional);
2807 body += LoadLocal(index);
2808 body += SmiBinaryOp(Token::kADD);
2809
2810 body += LoadIndexed(kRecordCid,
2811 /*index_scale*/ compiler::target::kCompressedWordSize);
2812
2813 body += StoreLocal(TokenPosition::kNoSource,
2814 parsed_function_->expression_temp_var());
2815 body += Drop();
2816 body += Goto(done);
2817
2818 body.current = done;
2819
2820 body += LoadLocal(parsed_function_->expression_temp_var());
2821 body += DropTempsPreserveTop(3); // field_names, num_named, num_positional
2822 body += Return(TokenPosition::kNoSource);
2823
2824 Fragment throw_nsm(nsm);
2825 throw_nsm += LoadLocal(parsed_function_->receiver_var());
2826 throw_nsm += ThrowNoSuchMethodError(TokenPosition::kNoSource, function,
2827 /*incompatible_arguments=*/false,
2828 /*receiver_pushed=*/true);
2829 throw_nsm += ThrowException(TokenPosition::kNoSource); // Close graph.
2830
2831 // There is no prologue code for a record field getter.
2832 PrologueInfo prologue_info(-1, -1);
2833 return new (Z)
2834 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2835 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2836}
2837
2838 // Information used by the various dynamic closure call fragment builders.
2839 struct FlowGraphBuilder::ClosureCallInfo {
2840 ClosureCallInfo(LocalVariable* closure,
2841 JoinEntryInstr* throw_no_such_method,
2842 const Array& arguments_descriptor_array,
2843 ParsedFunction::DynamicClosureCallVars* const vars)
2844 : closure(ASSERT_NOTNULL(closure)),
2845 throw_no_such_method(ASSERT_NOTNULL(throw_no_such_method)),
2846 descriptor(arguments_descriptor_array),
2847 vars(ASSERT_NOTNULL(vars)) {}
2848
2849 LocalVariable* const closure;
2850 JoinEntryInstr* const throw_no_such_method;
2851 const ArgumentsDescriptor descriptor;
2852 ParsedFunction::DynamicClosureCallVars* const vars;
2853
2854 // Set up by BuildClosureCallDefaultTypeHandling() when needed. These values
2855 // are read-only, so they don't need real local variables and are created
2856 // using MakeTemporary().
2857 LocalVariable* function = nullptr;
2858 LocalVariable* num_fixed_params = nullptr;
2859 LocalVariable* num_opt_params = nullptr;
2860 LocalVariable* num_max_params = nullptr;
2861 LocalVariable* has_named_params = nullptr;
2862 LocalVariable* named_parameter_names = nullptr;
2863 LocalVariable* parameter_types = nullptr;
2864 LocalVariable* type_parameters = nullptr;
2865 LocalVariable* num_type_parameters = nullptr;
2866 LocalVariable* type_parameter_flags = nullptr;
2867 LocalVariable* instantiator_type_args = nullptr;
2868 LocalVariable* parent_function_type_args = nullptr;
2869 LocalVariable* num_parent_type_args = nullptr;
2870};
2871
2872Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2873 const ClosureCallInfo& info,
2874 Fragment generic,
2875 Fragment not_generic) {
2876 JoinEntryInstr* after_branch = BuildJoinEntry();
2877
2878 Fragment check;
2879 check += LoadLocal(info.type_parameters);
2880 TargetEntryInstr* is_not_generic;
2881 TargetEntryInstr* is_generic;
2882 check += BranchIfNull(&is_not_generic, &is_generic);
2883
2884 generic.Prepend(is_generic);
2885 generic += Goto(after_branch);
2886
2887 not_generic.Prepend(is_not_generic);
2888 not_generic += Goto(after_branch);
2889
2890 return Fragment(check.entry, after_branch);
2891}
2892
2893Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2894 const ClosureCallInfo& info,
2895 Fragment set,
2896 Fragment not_set) {
2897 Fragment check_required;
2898 // We calculate the index to dereference in the parameter names array.
2899 check_required += LoadLocal(info.vars->current_param_index);
2900 check_required +=
2901 IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
2902 check_required += SmiBinaryOp(Token::kSHR);
2903 check_required += LoadLocal(info.num_opt_params);
2904 check_required += SmiBinaryOp(Token::kADD);
2905 LocalVariable* flags_index = MakeTemporary("flags_index"); // Read-only.
2906
2907 // One read-only stack value (flags_index) that must be dropped
2908 // after we rejoin at after_check.
2909 JoinEntryInstr* after_check = BuildJoinEntry();
2910
2911 // Now we check to see if the flags index is within the bounds of the
2912 // parameters names array. If not, it cannot be required.
2913 check_required += LoadLocal(flags_index);
2914 check_required += LoadLocal(info.named_parameter_names);
2915 check_required += LoadNativeField(Slot::Array_length());
2916 check_required += SmiRelationalOp(Token::kLT);
2917 TargetEntryInstr* valid_index;
2918 TargetEntryInstr* invalid_index;
2919 check_required += BranchIfTrue(&valid_index, &invalid_index);
2920
2921 JoinEntryInstr* join_not_set = BuildJoinEntry();
2922
2923 Fragment(invalid_index) + Goto(join_not_set);
2924
2925 // Otherwise, we need to retrieve the value. We're guaranteed the Smis in
2926 // the flag slots are non-null, so after loading we can immediately check
2927 // the required flag bit for the given named parameter.
2928 check_required.current = valid_index;
2929 check_required += LoadLocal(info.named_parameter_names);
2930 check_required += LoadLocal(flags_index);
2931 check_required += LoadIndexed(
2932 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2933 check_required += LoadLocal(info.vars->current_param_index);
2934 check_required +=
2935 IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
2936 check_required += SmiBinaryOp(Token::kBIT_AND);
2937 // If the below changes, we'll need to multiply by the number of parameter
2938 // flags before shifting.
2939 static_assert(compiler::target::kNumParameterFlags == 1,
2940 "IL builder assumes only one flag bit per parameter");
2941 check_required += SmiBinaryOp(Token::kSHR);
2942 check_required +=
2943 IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
2944 check_required += SmiBinaryOp(Token::kBIT_AND);
2945 check_required += IntConstant(0);
2946 TargetEntryInstr* is_not_set;
2947 TargetEntryInstr* is_set;
2948 check_required += BranchIfEqual(&is_not_set, &is_set);
2949
2950 Fragment(is_not_set) + Goto(join_not_set);
2951
2952 set.Prepend(is_set);
2953 set += Goto(after_check);
2954
2955 not_set.Prepend(join_not_set);
2956 not_set += Goto(after_check);
2957
2958 // After rejoining, drop the introduced temporaries.
2959 check_required.current = after_check;
2960 check_required += DropTemporary(&flags_index);
2961 return check_required;
2962}
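// Index arithmetic implemented above (a sketch; constant names abbreviated,
// one flag bit per parameter, several parameters packed per Smi element
// stored after the names):
//
//   flags_index = num_opt_params + (param_index >> kLog2FlagsPerElement);
//   shift       = param_index & (kFlagsPerElement - 1);
//   required    = ((flags[flags_index] >> shift) &
//                  (1 << kRequiredNamedParameterFlag)) != 0;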
2963
2964Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2965 const ClosureCallInfo& info) {
2966 if (info.descriptor.TypeArgsLen() > 0) {
2967 ASSERT(parsed_function_->function_type_arguments() != nullptr);
2968 // A TAV was provided, so we don't need default type argument handling
2969 // and can just take the arguments we were given.
2970 Fragment store_provided;
2971 store_provided += LoadLocal(parsed_function_->function_type_arguments());
2972 store_provided += StoreLocal(info.vars->function_type_args);
2973 store_provided += Drop();
2974 return store_provided;
2975 }
2976
2977 // Load the defaults, instantiating or replacing them with the other type
2978 // arguments as appropriate.
2979 Fragment store_default;
2980 store_default += LoadLocal(info.closure);
2981 store_default += LoadNativeField(Slot::Closure_function());
2982 store_default += LoadNativeField(Slot::Function_data());
2983 LocalVariable* closure_data = MakeTemporary("closure_data");
2984
2985 store_default += LoadLocal(closure_data);
2986 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
2987 ClosureData::PackedInstantiationMode>(Slot::ClosureData_packed_fields());
2988 LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
2989
2990 // Two locals to drop after join, closure_data and default_tav_kind.
2991 JoinEntryInstr* done = BuildJoinEntry();
2992
2993 store_default += LoadLocal(default_tav_kind);
2994 TargetEntryInstr* is_instantiated;
2995 TargetEntryInstr* is_not_instantiated;
2996 store_default +=
2997 IntConstant(static_cast<intptr_t>(InstantiationMode::kIsInstantiated));
2998 store_default += BranchIfEqual(&is_instantiated, &is_not_instantiated);
2999 store_default.current = is_not_instantiated; // Check next case.
3000 store_default += LoadLocal(default_tav_kind);
3001 TargetEntryInstr* needs_instantiation;
3002 TargetEntryInstr* can_share;
3003 store_default += IntConstant(
3004 static_cast<intptr_t>(InstantiationMode::kNeedsInstantiation));
3005 store_default += BranchIfEqual(&needs_instantiation, &can_share);
3006 store_default.current = can_share; // Check next case.
3007 store_default += LoadLocal(default_tav_kind);
3008 TargetEntryInstr* can_share_instantiator;
3009 TargetEntryInstr* can_share_function;
3010 store_default += IntConstant(static_cast<intptr_t>(
3011 InstantiationMode::kSharesInstantiatorTypeArguments));
3012 store_default += BranchIfEqual(&can_share_instantiator, &can_share_function);
3013
3014 Fragment instantiated(is_instantiated);
3015 instantiated += LoadLocal(info.type_parameters);
3016 instantiated += LoadNativeField(Slot::TypeParameters_defaults());
3017 instantiated += StoreLocal(info.vars->function_type_args);
3018 instantiated += Drop();
3019 instantiated += Goto(done);
3020
3021 Fragment do_instantiation(needs_instantiation);
3022 // Load the instantiator type arguments.
3023 do_instantiation += LoadLocal(info.instantiator_type_args);
3024 // Load the parent function type arguments. (No local function type arguments
3025 // can be used within the defaults).
3026 do_instantiation += LoadLocal(info.parent_function_type_args);
3027 // Load the default type arguments to instantiate.
3028 do_instantiation += LoadLocal(info.type_parameters);
3029 do_instantiation += LoadNativeField(Slot::TypeParameters_defaults());
3030 do_instantiation += InstantiateDynamicTypeArguments();
3031 do_instantiation += StoreLocal(info.vars->function_type_args);
3032 do_instantiation += Drop();
3033 do_instantiation += Goto(done);
3034
3035 Fragment share_instantiator(can_share_instantiator);
3036 share_instantiator += LoadLocal(info.instantiator_type_args);
3037 share_instantiator += StoreLocal(info.vars->function_type_args);
3038 share_instantiator += Drop();
3039 share_instantiator += Goto(done);
3040
3041 Fragment share_function(can_share_function);
3042 // Since the defaults won't have local type parameters, these must all be
3043 // from the parent function type arguments, so we can just use that vector.
3044 share_function += LoadLocal(info.parent_function_type_args);
3045 share_function += StoreLocal(info.vars->function_type_args);
3046 share_function += Drop();
3047 share_function += Goto(done);
3048
3049 store_default.current = done; // Return here after branching.
3050 store_default += DropTemporary(&default_tav_kind);
3051 store_default += DropTemporary(&closure_data);
3052
3053 Fragment store_delayed;
3054 store_delayed += LoadLocal(info.closure);
3055 store_delayed += LoadNativeField(Slot::Closure_delayed_type_arguments());
3056 store_delayed += StoreLocal(info.vars->function_type_args);
3057 store_delayed += Drop();
3058
3059 // Use the delayed type args if present, else the default ones.
3060 return TestDelayedTypeArgs(info.closure, store_delayed, store_default);
3061}
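// As a hedged Dart-level illustration (hypothetical example, not from the
// VM sources), delayed type arguments take precedence over the defaults
// computed above:
//
//   T id<T extends num>(T x) => x;
//   dynamic f = id;
//   f(3);                      // No TAV passed: defaults (<num>) are used.
//   int Function(int) h = id;  // Instantiated tear-off: delayed TAV <int>.
//   dynamic g = h;
//   g(3);                      // Delayed type arguments win over defaults.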
3062
3063Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
3064 const ClosureCallInfo& info) {
3065 // When no named arguments are provided, we just need to check for possible
3066 // required named arguments.
3067 if (info.descriptor.NamedCount() == 0) {
3068 // If the below changes, we can no longer assume that flag slots existing
3069 // means there are required parameters.
3070 static_assert(compiler::target::kNumParameterFlags == 1,
3071 "IL builder assumes only one flag bit per parameter");
3072 // No named args were provided, so check for any required named params.
3073 // Here, we assume that the only parameter flag saved is the required bit
3074 // for named parameters. If this changes, we'll need to check each flag
3075 // entry appropriately for any set required bits.
3076 Fragment has_any;
3077 has_any += LoadLocal(info.num_opt_params);
3078 has_any += LoadLocal(info.named_parameter_names);
3079 has_any += LoadNativeField(Slot::Array_length());
3080 TargetEntryInstr* no_required;
3081 TargetEntryInstr* has_required;
3082 has_any += BranchIfEqual(&no_required, &has_required);
3083
3084 Fragment(has_required) + Goto(info.throw_no_such_method);
3085
3086 return Fragment(has_any.entry, no_required);
3087 }
3088
3089 // Otherwise, we need to loop through the parameter names to check the names
3090 // of named arguments for validity (and possibly missing required ones).
3091 Fragment check_names;
3092 check_names += LoadLocal(info.vars->current_param_index);
3093 LocalVariable* old_index = MakeTemporary("old_index"); // Read-only.
3094 check_names += LoadLocal(info.vars->current_num_processed);
3095 LocalVariable* old_processed = MakeTemporary("old_processed"); // Read-only.
3096
3097 // Two local stack values (old_index, old_processed) to drop after rejoining
3098 // at done.
3099 JoinEntryInstr* loop = BuildJoinEntry();
3100 JoinEntryInstr* done = BuildJoinEntry();
3101
3102 check_names += IntConstant(0);
3103 check_names += StoreLocal(info.vars->current_num_processed);
3104 check_names += Drop();
3105 check_names += IntConstant(0);
3106 check_names += StoreLocal(info.vars->current_param_index);
3107 check_names += Drop();
3108 check_names += Goto(loop);
3109
3110 Fragment loop_check(loop);
3111 loop_check += LoadLocal(info.vars->current_param_index);
3112 loop_check += LoadLocal(info.num_opt_params);
3113 loop_check += SmiRelationalOp(Token::kLT);
3114 TargetEntryInstr* no_more;
3115 TargetEntryInstr* more;
3116 loop_check += BranchIfTrue(&more, &no_more);
3117
3118 Fragment(no_more) + Goto(done);
3119
3120 Fragment loop_body(more);
3121 // First load the name we need to check against.
3122 loop_body += LoadLocal(info.named_parameter_names);
3123 loop_body += LoadLocal(info.vars->current_param_index);
3124 loop_body += LoadIndexed(
3125 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3126 LocalVariable* param_name = MakeTemporary("param_name"); // Read only.
3127
3128 // One additional local value on the stack within the loop body (param_name)
3129 // that should be dropped after rejoining at loop_incr.
3130 JoinEntryInstr* loop_incr = BuildJoinEntry();
3131
3132 // Now iterate over the ArgumentsDescriptor names and check for a match.
3133 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3134 const auto& name = String::ZoneHandle(Z, info.descriptor.NameAt(i));
3135 loop_body += Constant(name);
3136 loop_body += LoadLocal(param_name);
3137 TargetEntryInstr* match;
3138 TargetEntryInstr* mismatch;
3139 loop_body += BranchIfEqual(&match, &mismatch);
3140 loop_body.current = mismatch;
3141
3142 // We have a match, so go to the next name after storing the corresponding
3143 // parameter index on the stack and incrementing the number of matched
3144 // arguments. (No need to check the required bit for provided parameters.)
3145 Fragment matched(match);
3146 matched += LoadLocal(info.vars->current_param_index);
3147 matched += LoadLocal(info.num_fixed_params);
3148 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3149 matched += StoreLocal(info.vars->named_argument_parameter_indices.At(i));
3150 matched += Drop();
3151 matched += LoadLocal(info.vars->current_num_processed);
3152 matched += IntConstant(1);
3153 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3154 matched += StoreLocal(info.vars->current_num_processed);
3155 matched += Drop();
3156 matched += Goto(loop_incr);
3157 }
3158
3159 // None of the names in the arguments descriptor matched, so check if this
3160 // is a required parameter.
3161 loop_body += TestClosureFunctionNamedParameterRequired(
3162 info,
3163 /*set=*/Goto(info.throw_no_such_method),
3164 /*not_set=*/{});
3165
3166 loop_body += Goto(loop_incr);
3167
3168 Fragment incr_index(loop_incr);
3169 incr_index += DropTemporary(&param_name);
3170 incr_index += LoadLocal(info.vars->current_param_index);
3171 incr_index += IntConstant(1);
3172 incr_index += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3173 incr_index += StoreLocal(info.vars->current_param_index);
3174 incr_index += Drop();
3175 incr_index += Goto(loop);
3176
3177 Fragment check_processed(done);
3178 check_processed += LoadLocal(info.vars->current_num_processed);
3179 check_processed += IntConstant(info.descriptor.NamedCount());
3180 TargetEntryInstr* all_processed;
3181 TargetEntryInstr* bad_name;
3182 check_processed += BranchIfEqual(&all_processed, &bad_name);
3183
3184 // Didn't find a matching parameter name for at least one argument name.
3185 Fragment(bad_name) + Goto(info.throw_no_such_method);
3186
3187 // Drop the temporaries at the end of the fragment.
3188 check_names.current = all_processed;
3189 check_names += LoadLocal(old_processed);
3190 check_names += StoreLocal(info.vars->current_num_processed);
3191 check_names += Drop();
3192 check_names += DropTemporary(&old_processed);
3193 check_names += LoadLocal(old_index);
3194 check_names += StoreLocal(info.vars->current_param_index);
3195 check_names += Drop();
3196 check_names += DropTemporary(&old_index);
3197 return check_names;
3198}
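// A hypothetical Dart-level example of what this check enforces at a
// dynamic closure call site (a sketch; exact error messages differ):
//
//   void g({int? a, required int b}) {}
//   dynamic f = g;
//   f(b: 1);        // OK: b matches a named parameter.
//   f(b: 1, c: 2);  // NoSuchMethodError: no parameter named c.
//   f();            // NoSuchMethodError: required parameter b not passed.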
3199
3200Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
3201 const ClosureCallInfo& info) {
3202 Fragment check_entry;
3203 // We only need to check the length of any explicitly provided type arguments.
3204 if (info.descriptor.TypeArgsLen() > 0) {
3205 Fragment check_type_args_length;
3206 check_type_args_length += LoadLocal(info.type_parameters);
3207 TargetEntryInstr* null;
3208 TargetEntryInstr* not_null;
3209 check_type_args_length += BranchIfNull(&null, &not_null);
3210 check_type_args_length.current = not_null; // Continue in non-error case.
3211 check_type_args_length += LoadLocal(info.signature);
3212 check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
3213 FunctionType::PackedNumTypeParameters>(
3214 Slot::FunctionType_packed_type_parameter_counts());
3215 check_type_args_length += IntConstant(info.descriptor.TypeArgsLen());
3216 TargetEntryInstr* equal;
3217 TargetEntryInstr* not_equal;
3218 check_type_args_length += BranchIfEqual(&equal, &not_equal);
3219 check_type_args_length.current = equal; // Continue in non-error case.
3220
3221 // The function is not generic.
3222 Fragment(null) + Goto(info.throw_no_such_method);
3223
3224 // An incorrect number of type arguments was passed.
3225 Fragment(not_equal) + Goto(info.throw_no_such_method);
3226
3227 // Type arguments should not be provided if there are delayed type
3228 // arguments, as then the closure itself is not generic.
3229 check_entry += TestDelayedTypeArgs(
3230 info.closure, /*present=*/Goto(info.throw_no_such_method),
3231 /*absent=*/check_type_args_length);
3232 }
3233
3234 check_entry += LoadLocal(info.has_named_params);
3235 TargetEntryInstr* has_named;
3236 TargetEntryInstr* has_positional;
3237 check_entry += BranchIfTrue(&has_named, &has_positional);
3238 JoinEntryInstr* join_after_optional = BuildJoinEntry();
3239 check_entry.current = join_after_optional;
3240
3241 if (info.descriptor.NamedCount() > 0) {
3242 // Named arguments were passed, but this function takes no named parameters.
3243 Fragment(has_positional) + Goto(info.throw_no_such_method);
3244 } else {
3245 Fragment check_pos(has_positional);
3246 check_pos += LoadLocal(info.num_fixed_params);
3247 check_pos += IntConstant(info.descriptor.PositionalCount());
3248 check_pos += SmiRelationalOp(Token::kLTE);
3249 TargetEntryInstr* enough;
3250 TargetEntryInstr* too_few;
3251 check_pos += BranchIfTrue(&enough, &too_few);
3252 check_pos.current = enough;
3253
3254 Fragment(too_few) + Goto(info.throw_no_such_method);
3255
3256 check_pos += IntConstant(info.descriptor.PositionalCount());
3257 check_pos += LoadLocal(info.num_max_params);
3258 check_pos += SmiRelationalOp(Token::kLTE);
3259 TargetEntryInstr* valid;
3260 TargetEntryInstr* too_many;
3261 check_pos += BranchIfTrue(&valid, &too_many);
3262 check_pos.current = valid;
3263
3264 Fragment(too_many) + Goto(info.throw_no_such_method);
3265
3266 check_pos += Goto(join_after_optional);
3267 }
3268
3269 Fragment check_named(has_named);
3270
3271 TargetEntryInstr* same;
3272 TargetEntryInstr* different;
3273 check_named += LoadLocal(info.num_fixed_params);
3274 check_named += IntConstant(info.descriptor.PositionalCount());
3275 check_named += BranchIfEqual(&same, &different);
3276 check_named.current = same;
3277
3278 Fragment(different) + Goto(info.throw_no_such_method);
3279
3280 if (info.descriptor.NamedCount() > 0) {
3281 check_named += IntConstant(info.descriptor.NamedCount());
3282 check_named += LoadLocal(info.num_opt_params);
3283 check_named += SmiRelationalOp(Token::kLTE);
3284 TargetEntryInstr* valid;
3285 TargetEntryInstr* too_many;
3286 check_named += BranchIfTrue(&valid, &too_many);
3287 check_named.current = valid;
3288
3289 Fragment(too_many) + Goto(info.throw_no_such_method);
3290 }
3291
3292 // Check the names for optional arguments. If applicable, also check that all
3293 // required named parameters are provided.
3294 check_named += BuildClosureCallNamedArgumentsCheck(info);
3295 check_named += Goto(join_after_optional);
3296
3297 check_entry.current = join_after_optional;
3298 return check_entry;
3299}
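// For illustration, a hypothetical example of the shape checks above:
//
//   void h(int a, [int b = 0]) {}
//   dynamic f = h;
//   f(1);        // OK: fixed count (1) <= positional count <= max (2).
//   f(1, 2, 3);  // NoSuchMethodError: too many positional arguments.
//   f(1, x: 2);  // NoSuchMethodError: h takes no named arguments.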
3300
3301Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
3302 const ClosureCallInfo& info) {
3303 JoinEntryInstr* done = BuildJoinEntry();
3304 JoinEntryInstr* loop = BuildJoinEntry();
3305
3306 // We assume that the value stored in :t_type_parameters is not null (i.e.,
3307 // the function stored in :t_function is generic).
3308 Fragment loop_init;
3309
3310 // A null bounds vector represents a vector of dynamic bounds, so no check is needed.
3311 loop_init += LoadLocal(info.type_parameters);
3312 loop_init += LoadNativeField(Slot::TypeParameters_bounds());
3313 TargetEntryInstr* null_bounds;
3314 TargetEntryInstr* non_null_bounds;
3315 loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
3316
3317 Fragment(null_bounds) + Goto(done);
3318
3319 loop_init.current = non_null_bounds;
3320 // Loop over the type parameters array.
3321 loop_init += IntConstant(0);
3322 loop_init += StoreLocal(info.vars->current_param_index);
3323 loop_init += Drop();
3324 loop_init += Goto(loop);
3325
3326 Fragment loop_check(loop);
3327 loop_check += LoadLocal(info.vars->current_param_index);
3328 loop_check += LoadLocal(info.num_type_parameters);
3329 loop_check += SmiRelationalOp(Token::kLT);
3330 TargetEntryInstr* more;
3331 TargetEntryInstr* no_more;
3332 loop_check += BranchIfTrue(&more, &no_more);
3333
3334 Fragment(no_more) + Goto(done);
3335
3336 Fragment loop_test_flag(more);
3337 JoinEntryInstr* next = BuildJoinEntry();
3338 JoinEntryInstr* check = BuildJoinEntry();
3339 loop_test_flag += LoadLocal(info.type_parameter_flags);
3340 TargetEntryInstr* null_flags;
3341 TargetEntryInstr* non_null_flags;
3342 loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
3343
3344 Fragment(null_flags) + Goto(check); // Check type if null (non-covariant).
3345
3346 loop_test_flag.current = non_null_flags; // Test flags if not null.
3347 loop_test_flag += LoadLocal(info.type_parameter_flags);
3348 loop_test_flag += LoadLocal(info.vars->current_param_index);
3349 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
3350 loop_test_flag += SmiBinaryOp(Token::kSHR);
3351 loop_test_flag += LoadIndexed(
3352 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3353 loop_test_flag += LoadLocal(info.vars->current_param_index);
3354 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
3355 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3356 loop_test_flag += SmiBinaryOp(Token::kSHR);
3357 loop_test_flag += IntConstant(1);
3358 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3359 loop_test_flag += IntConstant(0);
3360 TargetEntryInstr* is_noncovariant;
3361 TargetEntryInstr* is_covariant;
3362 loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
3363
3364 Fragment(is_covariant) + Goto(next); // Continue if covariant.
3365 Fragment(is_noncovariant) + Goto(check); // Check type if non-covariant.
3366
3367 Fragment loop_prep_type_param(check);
3368 JoinEntryInstr* dynamic_type_param = BuildJoinEntry();
3369 JoinEntryInstr* call = BuildJoinEntry();
3370
3371 // Load type argument already stored in function_type_args if non null.
3372 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3373 TargetEntryInstr* null_ftav;
3374 TargetEntryInstr* non_null_ftav;
3375 loop_prep_type_param += BranchIfNull(&null_ftav, &non_null_ftav);
3376
3377 Fragment(null_ftav) + Goto(dynamic_type_param);
3378
3379 loop_prep_type_param.current = non_null_ftav;
3380 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3381 loop_prep_type_param += LoadLocal(info.vars->current_param_index);
3382 loop_prep_type_param += LoadLocal(info.num_parent_type_args);
3383 loop_prep_type_param += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3384 loop_prep_type_param += LoadIndexed(
3385 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3386 loop_prep_type_param += StoreLocal(info.vars->current_type_param);
3387 loop_prep_type_param += Drop();
3388 loop_prep_type_param += Goto(call);
3389
3390 Fragment loop_dynamic_type_param(dynamic_type_param);
3391 // If function_type_args is null, the instantiated type param is dynamic.
3392 loop_dynamic_type_param += Constant(Type::ZoneHandle(Type::DynamicType()));
3393 loop_dynamic_type_param += StoreLocal(info.vars->current_type_param);
3394 loop_dynamic_type_param += Drop();
3395 loop_dynamic_type_param += Goto(call);
3396
3397 Fragment loop_call_check(call);
3398 // Load instantiators.
3399 loop_call_check += LoadLocal(info.instantiator_type_args);
3400 loop_call_check += LoadLocal(info.vars->function_type_args);
3401 // Load instantiated type parameter.
3402 loop_call_check += LoadLocal(info.vars->current_type_param);
3403 // Load bound from type parameters.
3404 loop_call_check += LoadLocal(info.type_parameters);
3405 loop_call_check += LoadNativeField(Slot::TypeParameters_bounds());
3406 loop_call_check += LoadLocal(info.vars->current_param_index);
3407 loop_call_check += LoadIndexed(
3408 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3409 // Load (canonicalized) name of type parameter in signature.
3410 loop_call_check += LoadLocal(info.type_parameters);
3411 loop_call_check += LoadNativeField(Slot::TypeParameters_names());
3412 loop_call_check += LoadLocal(info.vars->current_param_index);
3413 loop_call_check += LoadIndexed(
3414 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3415 // Assert that the passed-in type argument is consistent with the bound of
3416 // the corresponding type parameter.
3417 loop_call_check += AssertSubtype(TokenPosition::kNoSource);
3418 loop_call_check += Goto(next);
3419
3420 Fragment loop_incr(next);
3421 loop_incr += LoadLocal(info.vars->current_param_index);
3422 loop_incr += IntConstant(1);
3423 loop_incr += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3424 loop_incr += StoreLocal(info.vars->current_param_index);
3425 loop_incr += Drop();
3426 loop_incr += Goto(loop);
3427
3428 return Fragment(loop_init.entry, done);
3429}
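// A rough Dart-level sketch (hypothetical example) of the bound checks
// performed by the loop above when explicit type arguments reach a
// dynamic closure call:
//
//   T first<T extends num>(List<T> xs) => xs[0];
//   dynamic f = first;
//   f<int>([1, 2]);    // OK: int satisfies the bound num.
//   f<String>(['a']);  // Throws: String violates the bound num.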
3430
3431Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
3432 const ClosureCallInfo& info,
3433 LocalVariable* param_index,
3434 intptr_t arg_index,
3435 const String& arg_name) {
3436 Fragment instructions;
3437
3438 // Load value.
3439 instructions += LoadLocal(parsed_function_->ParameterVariable(arg_index));
3440 // Load destination type.
3441 instructions += LoadLocal(info.parameter_types);
3442 instructions += LoadLocal(param_index);
3443 instructions += LoadIndexed(
3444 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3445 // Load instantiator type arguments.
3446 instructions += LoadLocal(info.instantiator_type_args);
3447 // Load the full set of function type arguments.
3448 instructions += LoadLocal(info.vars->function_type_args);
3449 // Check that the value has the right type.
3450 instructions += AssertAssignable(TokenPosition::kNoSource, arg_name,
3451 AssertAssignableInstr::kParameterCheck);
3452 // Make sure to store the result to keep data dependencies accurate.
3453 instructions += StoreLocal(parsed_function_->ParameterVariable(arg_index));
3454 instructions += Drop();
3455
3456 return instructions;
3457}
3458
3459Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
3460 const ClosureCallInfo& info) {
3461 Fragment instructions;
3462
3463 // Only check explicit arguments (i.e., skip the receiver), as the receiver
3464 // is always assignable to its type (stored as dynamic).
3465 for (intptr_t i = 1; i < info.descriptor.PositionalCount(); i++) {
3466 instructions += IntConstant(i);
3467 LocalVariable* param_index = MakeTemporary("param_index");
3468 // We don't have a compile-time name, so this symbol signals the runtime
3469 // that it should recreate the type check using info from the stack.
3470 instructions += BuildClosureCallArgumentTypeCheck(
3471 info, param_index, i, Symbols::dynamic_assert_assignable_stc_check());
3472 instructions += DropTemporary(&param_index);
3473 }
3474
3475 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3476 const intptr_t arg_index = info.descriptor.PositionAt(i);
3477 auto const param_index = info.vars->named_argument_parameter_indices.At(i);
3478 // We have a compile-time name available, but we still want the runtime to
3479 // detect that the generated AssertAssignable instruction is dynamic.
3480 instructions += BuildClosureCallArgumentTypeCheck(
3481 info, param_index, arg_index,
3482 Symbols::dynamic_assert_assignable_stc_check());
3483 }
3484
3485 return instructions;
3486}
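// For illustration (hypothetical example), these per-argument checks make
// a dynamic closure call as safe as a statically checked one:
//
//   void p(int x, {String? s}) {}
//   dynamic f = p;
//   f(1, s: 'a');  // OK: both values are assignable.
//   f('a');        // TypeError: 'a' is not assignable to int.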
3487
3488Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
3489 LocalVariable* closure) {
3490 ClosureCallInfo info(closure, BuildThrowNoSuchMethod(),
3491 saved_args_desc_array(),
3492 parsed_function_->dynamic_closure_call_vars());
3493
3494 Fragment body;
3495 body += LoadLocal(info.closure);
3496 body += LoadNativeField(Slot::Closure_function());
3497 body += LoadNativeField(Slot::Function_signature());
3498 info.signature = MakeTemporary("signature");
3499
3500 body += LoadLocal(info.signature);
3501 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3502 FunctionType::PackedNumFixedParameters>(
3503 Slot::FunctionType_packed_parameter_counts());
3504 info.num_fixed_params = MakeTemporary("num_fixed_params");
3505
3506 body += LoadLocal(info.signature);
3507 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3508 FunctionType::PackedNumOptionalParameters>(
3509 Slot::FunctionType_packed_parameter_counts());
3510 info.num_opt_params = MakeTemporary("num_opt_params");
3511
3512 body += LoadLocal(info.num_fixed_params);
3513 body += LoadLocal(info.num_opt_params);
3514 body += SmiBinaryOp(Token::kADD);
3515 info.num_max_params = MakeTemporary("num_max_params");
3516
3517 body += LoadLocal(info.signature);
3518 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3519 FunctionType::PackedHasNamedOptionalParameters>(
3520 Slot::FunctionType_packed_parameter_counts());
3521
3522 body += IntConstant(0);
3523 body += StrictCompare(Token::kNE_STRICT);
3524 info.has_named_params = MakeTemporary("has_named_params");
3525
3526 body += LoadLocal(info.signature);
3527 body += LoadNativeField(Slot::FunctionType_named_parameter_names());
3528 info.named_parameter_names = MakeTemporary("named_parameter_names");
3529
3530 body += LoadLocal(info.signature);
3531 body += LoadNativeField(Slot::FunctionType_parameter_types());
3532 info.parameter_types = MakeTemporary("parameter_types");
3533
3534 body += LoadLocal(info.signature);
3535 body += LoadNativeField(Slot::FunctionType_type_parameters());
3536 info.type_parameters = MakeTemporary("type_parameters");
3537
3538 body += LoadLocal(info.closure);
3539 body += LoadNativeField(Slot::Closure_instantiator_type_arguments());
3540 info.instantiator_type_args = MakeTemporary("instantiator_type_args");
3541
3542 body += LoadLocal(info.closure);
3543 body += LoadNativeField(Slot::Closure_function_type_arguments());
3544 info.parent_function_type_args = MakeTemporary("parent_function_type_args");
3545
3546 // At this point, all the read-only temporaries stored in the ClosureCallInfo
3547 // should be either loaded or still nullptr, if not needed for this function.
3548 // Now we check that the arguments to the closure call have the right shape.
3549 body += BuildClosureCallArgumentsValidCheck(info);
3550
3551 // If the closure function is not generic, there are no local function type
3552 // args. Thus, use whatever was stored for the parent function type arguments,
3553 // which has already been checked against any parent type parameter bounds.
3554 Fragment not_generic;
3555 not_generic += LoadLocal(info.parent_function_type_args);
3556 not_generic += StoreLocal(info.vars->function_type_args);
3557 not_generic += Drop();
3558
3559 // If the closure function is generic, then we first need to calculate the
3560 // full set of function type arguments, then check the local function type
3561 // arguments against the closure function's type parameter bounds.
3562 Fragment generic;
3563 // Calculate the number of parent type arguments and store them in
3564 // info.num_parent_type_args.
3565 generic += LoadLocal(info.signature);
3566 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3567 FunctionType::PackedNumParentTypeArguments>(
3568 Slot::FunctionType_packed_type_parameter_counts());
3569 info.num_parent_type_args = MakeTemporary("num_parent_type_args");
3570
3571 // Hoist number of type parameters.
3572 generic += LoadLocal(info.signature);
3573 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3574 FunctionType::PackedNumTypeParameters>(
3575 Slot::FunctionType_packed_type_parameter_counts());
3576 info.num_type_parameters = MakeTemporary("num_type_parameters");
3577
3578 // Hoist type parameter flags.
3579 generic += LoadLocal(info.type_parameters);
3580 generic += LoadNativeField(Slot::TypeParameters_flags());
3581 info.type_parameter_flags = MakeTemporary("type_parameter_flags");
3582
3583 // Calculate the local function type arguments and store them in
3584 // info.vars->function_type_args.
3585 generic += BuildClosureCallDefaultTypeHandling(info);
3586
3587 // Load the local function type args.
3588 generic += LoadLocal(info.vars->function_type_args);
3589 // Load the parent function type args.
3590 generic += LoadLocal(info.parent_function_type_args);
3591 // Load the number of parent type parameters.
3592 generic += LoadLocal(info.num_parent_type_args);
3593 // Load the number of total type parameters.
3594 generic += LoadLocal(info.num_parent_type_args);
3595 generic += LoadLocal(info.num_type_parameters);
3596 generic += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3597
3598 // Call the static function for prepending type arguments.
3599 generic += StaticCall(TokenPosition::kNoSource,
3600 PrependTypeArgumentsFunction(), 4, ICData::kStatic);
3601 generic += StoreLocal(info.vars->function_type_args);
3602 generic += Drop();
3603
3604 // Now that we have the full set of function type arguments, check them
3605 // against the type parameter bounds. However, if the local function type
3606 // arguments are delayed type arguments, they have already been checked by
3607 // the type system and need not be checked again at the call site.
3608 auto const check_bounds = BuildClosureCallTypeArgumentsTypeCheck(info);
3609 if (FLAG_eliminate_type_checks) {
3610 generic += TestDelayedTypeArgs(info.closure, /*present=*/{},
3611 /*absent=*/check_bounds);
3612 } else {
3613 generic += check_bounds;
3614 }
3615 generic += DropTemporary(&info.type_parameter_flags);
3616 generic += DropTemporary(&info.num_type_parameters);
3617 generic += DropTemporary(&info.num_parent_type_args);
3618
3619 // Call the appropriate fragment for setting up the function type arguments
3620 // and performing any needed type argument checking.
3621 body += TestClosureFunctionGeneric(info, generic, not_generic);
3622
3623 // Check that the values provided as arguments are assignable to the types
3624 // of the corresponding closure function parameters.
3625 body += BuildClosureCallArgumentTypeChecks(info);
3626
3627 // Drop all the read-only temporaries at the end of the fragment.
3628 body += DropTemporary(&info.parent_function_type_args);
3629 body += DropTemporary(&info.instantiator_type_args);
3630 body += DropTemporary(&info.type_parameters);
3631 body += DropTemporary(&info.parameter_types);
3632 body += DropTemporary(&info.named_parameter_names);
3633 body += DropTemporary(&info.has_named_params);
3634 body += DropTemporary(&info.num_max_params);
3635 body += DropTemporary(&info.num_opt_params);
3636 body += DropTemporary(&info.num_fixed_params);
3637 body += DropTemporary(&info.signature);
3638
3639 return body;
3640}
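// Taken together, the fragment built here performs, in order (a summary of
// the code above, not a separate specification):
//   1. argument shape checks: type argument count, positional argument
//      counts, named argument names, and required named parameters;
//   2. computation of the full function type argument vector from the
//      delayed, provided, or default type arguments;
//   3. type parameter bound checks for generic closures (unless elided);
//   4. per-argument assignability checks against the parameter types.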
3641
3642FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
3643 const Function& function) {
3644 const ArgumentsDescriptor descriptor(saved_args_desc_array());
3645 // Find the name of the field we should dispatch to.
3646 const Class& owner = Class::Handle(Z, function.Owner());
3647 ASSERT(!owner.IsNull());
3648 auto& field_name = String::Handle(Z, function.name());
3649 // If the field name has a dyn: tag, then remove it. We don't add dynamic
3650 // invocation forwarders for field getters used for invoking; we just use
3651 // the tag in the name of the invoke field dispatcher to detect dynamic calls.
3652 const bool is_dynamic_call =
3653 Function::IsDynamicInvocationForwarderName(field_name);
3654 if (is_dynamic_call) {
3655 field_name = Function::DemangleDynamicInvocationForwarderName(field_name);
3656 }
3657 const String& getter_name = String::ZoneHandle(
3658 Z, Symbols::New(thread_,
3659 String::Handle(Z, Field::GetterSymbol(field_name))));
3660
3661 // Determine if this is `class Closure { get call => this; }`
3662 const Class& closure_class =
3663 Class::Handle(Z, IG->object_store()->closure_class());
3664 const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
3665 field_name.Equals(Symbols::call());
3666
3667 graph_entry_ =
3668 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3669
3670 auto normal_entry = BuildFunctionEntry(graph_entry_);
3671 graph_entry_->set_normal_entry(normal_entry);
3672
3673 PrologueInfo prologue_info(-1, -1);
3674 BlockEntryInstr* instruction_cursor =
3675 BuildPrologue(normal_entry, &prologue_info);
3676
3677 Fragment body(instruction_cursor);
3678 body += CheckStackOverflowInPrologue(function.token_pos());
3679
3680 // Build any dynamic closure call checks before pushing arguments to the
3681 // final call on the stack to make debugging easier.
3682 LocalVariable* closure = nullptr;
3683 if (is_closure_call) {
3684 closure = parsed_function_->ParameterVariable(0);
3685 if (is_dynamic_call) {
3686 // The whole reason for making this invoke field dispatcher is that
3687 // this closure call needs checking, so we shouldn't inline a call to an
3688 // unchecked entry that can't tail call NSM.
3690 "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
3691
3692 body += BuildDynamicClosureCallChecks(closure);
3693 }
3694 }
3695
3696 if (descriptor.TypeArgsLen() > 0) {
3697 LocalVariable* type_args = parsed_function_->function_type_arguments();
3698 ASSERT(type_args != nullptr);
3699 body += LoadLocal(type_args);
3700 }
3701
3702 if (is_closure_call) {
3703 // The closure itself is the first argument.
3704 body += LoadLocal(closure);
3705 } else {
3706 // Invoke the getter to get the field value.
3707 body += LoadLocal(parsed_function_->ParameterVariable(0));
3708 const intptr_t kTypeArgsLen = 0;
3709 const intptr_t kNumArgsChecked = 1;
3710 body += InstanceCall(TokenPosition::kMinSource, getter_name, Token::kGET,
3711 kTypeArgsLen, 1, Array::null_array(), kNumArgsChecked);
3712 }
3713
3714 // Push all arguments onto the stack.
3715 for (intptr_t pos = 1; pos < descriptor.Count(); pos++) {
3716 body += LoadLocal(parsed_function_->ParameterVariable(pos));
3717 }
3718
3719 // Construct argument names array if necessary.
3720 const Array* argument_names = &Object::null_array();
3721 if (descriptor.NamedCount() > 0) {
3722 const auto& array_handle =
3723 Array::ZoneHandle(Z, Array::New(descriptor.NamedCount(), Heap::kNew));
3724 String& string_handle = String::Handle(Z);
3725 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
3726 const intptr_t named_arg_index =
3727 descriptor.PositionAt(i) - descriptor.PositionalCount();
3728 string_handle = descriptor.NameAt(i);
3729 array_handle.SetAt(named_arg_index, string_handle);
3730 }
3731 argument_names = &array_handle;
3732 }
3733
3734 if (is_closure_call) {
3735 body += LoadLocal(closure);
3736 if (!FLAG_precompiled_mode) {
3737 // Lookup the function in the closure.
3738 body += LoadNativeField(Slot::Closure_function());
3739 }
3740 body += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
3741 descriptor.TypeArgsLen(), descriptor.Count(),
3742 *argument_names);
3743 } else {
3744 const intptr_t kNumArgsChecked = 1;
3745 body +=
3746 InstanceCall(TokenPosition::kMinSource,
3747 is_dynamic_call ? Symbols::DynamicCall() : Symbols::call(),
3748 Token::kILLEGAL, descriptor.TypeArgsLen(),
3749 descriptor.Count(), *argument_names, kNumArgsChecked);
3750 }
3751
3752 body += Return(TokenPosition::kNoSource);
3753
3754 return new (Z)
3755 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3756 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
3757}
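// As a hedged Dart-level illustration (hypothetical example), an
// invoke-field dispatcher implements calls through a field or getter,
// so `c.f(1)` below behaves like `(c.f)(1)`:
//
//   class C {
//     final void Function(int) f;
//     C(this.f);
//   }
//   dynamic c = C((x) {});
//   c.f(1);  // Dispatcher invokes the getter for f, then calls the result.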
3758
3759FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
3760 const Function& function,
3761 bool is_implicit_closure_function,
3762 bool throw_no_such_method_error) {
3763 graph_entry_ =
3764 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3765
3766 auto normal_entry = BuildFunctionEntry(graph_entry_);
3767 graph_entry_->set_normal_entry(normal_entry);
3768
3769 PrologueInfo prologue_info(-1, -1);
3770 BlockEntryInstr* instruction_cursor =
3771 BuildPrologue(normal_entry, &prologue_info);
3772
3773 Fragment body(instruction_cursor);
3774 body += CheckStackOverflowInPrologue(function.token_pos());
3775
3776 // If we are inside the tearoff wrapper function (implicit closure), we need
3777 // to extract the receiver from the context. We just replace it directly on
3778 // the stack to simplify the rest of the code.
3779 if (is_implicit_closure_function && !function.is_static()) {
3780 if (parsed_function_->has_arg_desc_var()) {
3781 body += LoadArgDescriptor();
3782 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3783 } else {
3784 ASSERT(function.NumOptionalParameters() == 0);
3785 body += IntConstant(function.NumParameters());
3786 }
3787 body += LoadLocal(parsed_function_->current_context_var());
3788 body += StoreFpRelativeSlot(
3789 kWordSize * compiler::target::frame_layout.param_end_from_fp);
3790 }
3791
3792 if (function.NeedsTypeArgumentTypeChecks()) {
3793 BuildTypeArgumentTypeChecks(TypeChecksToBuild::kCheckAllTypeParameterBounds,
3794 &body);
3795 }
3796
3797 if (function.NeedsArgumentTypeChecks()) {
3798 BuildArgumentTypeChecks(&body, &body, nullptr);
3799 }
3800
3801 body += MakeTemp();
3802 LocalVariable* result = MakeTemporary();
3803
3804 // Do "++argument_count" if any type arguments were passed.
3805 LocalVariable* argument_count_var = parsed_function_->expression_temp_var();
3806 body += IntConstant(0);
3807 body += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3808 body += Drop();
3809 if (function.IsGeneric()) {
3810 Fragment then;
3811 Fragment otherwise;
3812 otherwise += IntConstant(1);
3813 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3814 otherwise += Drop();
3815 body += TestAnyTypeArgs(then, otherwise);
3816 }
3817
3818 if (function.HasOptionalParameters()) {
3819 body += LoadArgDescriptor();
3820 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3821 } else {
3822 body += IntConstant(function.NumParameters());
3823 }
3824 body += LoadLocal(argument_count_var);
3825 body += SmiBinaryOp(Token::kADD, /* truncate= */ true);
3826 LocalVariable* argument_count = MakeTemporary();
3827
3828 // We are generating code like the following:
3829 //
3830 // var arguments = new Array<dynamic>(argument_count);
3831 //
3832 // int i = 0;
3833 // if (any type arguments are passed) {
3834 // arguments[0] = function_type_arguments;
3835 // ++i;
3836 // }
3837 //
3838 // for (; i < argument_count; ++i) {
3839 // arguments[i] = LoadFpRelativeSlot(
3840 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3841 // }
3842
3843 body += LoadLocal(argument_count);
3844 body += CreateArray();
3845 LocalVariable* arguments = MakeTemporary();
3846
3847 {
3848 // int i = 0
3849 LocalVariable* index = parsed_function_->expression_temp_var();
3850 body += IntConstant(0);
3851 body += StoreLocal(TokenPosition::kNoSource, index);
3852 body += Drop();
3853
3854 // if (any type arguments are passed) {
3855 // arguments[0] = function_type_arguments;
3856 // i = 1;
3857 // }
3858 if (function.IsGeneric()) {
3859 Fragment store;
3860 store += LoadLocal(arguments);
3861 store += IntConstant(0);
3862 store += LoadFunctionTypeArguments();
3863 store += StoreIndexed(kArrayCid);
3864 store += IntConstant(1);
3865 store += StoreLocal(TokenPosition::kNoSource, index);
3866 store += Drop();
3867 body += TestAnyTypeArgs(store, Fragment());
3868 }
3869
3870 TargetEntryInstr* body_entry;
3871 TargetEntryInstr* loop_exit;
3872
3873 Fragment condition;
3874 // i < argument_count
3875 condition += LoadLocal(index);
3876 condition += LoadLocal(argument_count);
3877 condition += SmiRelationalOp(Token::kLT);
3878 condition += BranchIfTrue(&body_entry, &loop_exit, /*negate=*/false);
3879
3880 Fragment loop_body(body_entry);
3881
3882 // arguments[i] = LoadFpRelativeSlot(
3883 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3884 loop_body += LoadLocal(arguments);
3885 loop_body += LoadLocal(index);
3886 loop_body += LoadLocal(argument_count);
3887 loop_body += LoadLocal(index);
3888 loop_body += SmiBinaryOp(Token::kSUB, /*truncate=*/true);
3889 loop_body +=
3890 LoadFpRelativeSlot(compiler::target::kWordSize *
3891 compiler::target::frame_layout.param_end_from_fp,
3892 CompileType::Dynamic());
3893 loop_body += StoreIndexed(kArrayCid);
3894
3895 // ++i
3896 loop_body += LoadLocal(index);
3897 loop_body += IntConstant(1);
3898 loop_body += SmiBinaryOp(Token::kADD, /*truncate=*/true);
3899 loop_body += StoreLocal(TokenPosition::kNoSource, index);
3900 loop_body += Drop();
3901
3902 JoinEntryInstr* join = BuildJoinEntry();
3903 loop_body += Goto(join);
3904
3905 Fragment loop(join);
3906 loop += condition;
3907
3908 Instruction* entry =
3909 new (Z) GotoInstr(join, CompilerState::Current().GetNextDeoptId());
3910 body += Fragment(entry, loop_exit);
3911 }
3912
3913 // Load receiver.
3914 if (is_implicit_closure_function) {
3915 if (throw_no_such_method_error) {
3916 const Function& parent =
3917 Function::ZoneHandle(Z, function.parent_function());
3918 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3919 AbstractType& type = AbstractType::ZoneHandle(Z);
3920 type = Type::New(owner, Object::null_type_arguments());
3921 type = ClassFinalizer::FinalizeType(type);
3922 body += Constant(type);
3923 } else {
3924 body += LoadLocal(parsed_function_->current_context_var());
3925 }
3926 } else {
3927 body += LoadLocal(parsed_function_->ParameterVariable(0));
3928 }
3929
3930 body += Constant(String::ZoneHandle(Z, function.name()));
3931
3932 if (!parsed_function_->has_arg_desc_var()) {
3933 // If there is no variable for the arguments descriptor (this function's
3934 // signature doesn't require it), then we need to create one.
3935 Array& args_desc = Array::ZoneHandle(
3936 Z, ArgumentsDescriptor::NewBoxed(0, function.NumParameters()));
3937 body += Constant(args_desc);
3938 } else {
3939 body += LoadArgDescriptor();
3940 }
3941
3942 body += LoadLocal(arguments);
3943
3944 if (throw_no_such_method_error) {
3945 const Function& parent =
3946 Function::ZoneHandle(Z, function.parent_function());
3947 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3948 InvocationMirror::Level im_level = owner.IsTopLevel()
3949 ? InvocationMirror::kTopLevel
3950 : InvocationMirror::kDynamic;
3951 InvocationMirror::Kind im_kind;
3952 if (function.IsImplicitGetterFunction() || function.IsGetterFunction()) {
3953 im_kind = InvocationMirror::kGetter;
3954 } else if (function.IsImplicitSetterFunction() ||
3955 function.IsSetterFunction()) {
3956 im_kind = InvocationMirror::kSetter;
3957 } else {
3958 im_kind = InvocationMirror::kMethod;
3959 }
3960 body += IntConstant(InvocationMirror::EncodeType(im_level, im_kind));
3961 } else {
3962 body += NullConstant();
3963 }
3964
3965 // Push the number of delayed type arguments.
3966 if (function.IsClosureFunction()) {
3967 LocalVariable* closure = parsed_function_->ParameterVariable(0);
3968 Fragment then;
3969 then += IntConstant(function.NumTypeParameters());
3970 then += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3971 then += Drop();
3972 Fragment otherwise;
3973 otherwise += IntConstant(0);
3974 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3975 otherwise += Drop();
3976 body += TestDelayedTypeArgs(closure, then, otherwise);
3977 body += LoadLocal(argument_count_var);
3978 } else {
3979 body += IntConstant(0);
3980 }
3981
3982 const Class& mirror_class =
3983 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
3984 ASSERT(!mirror_class.IsNull());
3985 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
3986 ASSERT(error == Error::null());
3987 const Function& allocation_function = Function::ZoneHandle(
3988 Z, mirror_class.LookupStaticFunction(Library::PrivateCoreLibName(
3989 Symbols::AllocateInvocationMirrorForClosure())));
3990 ASSERT(!allocation_function.IsNull());
3991 body += StaticCall(TokenPosition::kMinSource, allocation_function,
3992 /* argument_count = */ 5, ICData::kStatic);
3993
3994 if (throw_no_such_method_error) {
3995 const Class& klass = Class::ZoneHandle(
3996 Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
3997 ASSERT(!klass.IsNull());
3998 const auto& error = klass.EnsureIsFinalized(H.thread());
3999 ASSERT(error == Error::null());
4000 const Function& throw_function = Function::ZoneHandle(
4001 Z,
4002 klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNewInvocation()));
4003 ASSERT(!throw_function.IsNull());
4004 body += StaticCall(TokenPosition::kNoSource, throw_function, 2,
4005 ICData::kStatic);
4006 } else {
4007 body += InstanceCall(
4008 TokenPosition::kNoSource, Symbols::NoSuchMethod(), Token::kILLEGAL,
4009 /*type_args_len=*/0, /*argument_count=*/2, Array::null_array(),
4010 /*checked_argument_count=*/1);
4011 }
4012 body += StoreLocal(TokenPosition::kNoSource, result);
4013 body += Drop();
4014
4015 body += Drop(); // arguments
4016 body += Drop(); // argument count
4017
4018 AbstractType& return_type = AbstractType::Handle(function.result_type());
4019 if (!return_type.IsTopTypeForSubtyping()) {
4020 body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
4021 return_type, Symbols::Empty());
4022 }
4023 body += Return(TokenPosition::kNoSource);
4024
4025 return new (Z)
4026 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4027 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4028}
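// A rough pseudocode sketch of the generated forwarder (argument names
// here are descriptive; the runtime entry points are internal):
//
//   mirror = _allocateInvocationMirrorForClosure(
//       name, argsDescriptor, argumentsArray, encodedTypeOrNull,
//       numDelayedTypeArguments);
//   result = receiver.noSuchMethod(mirror);
//   // ...or a static NoSuchMethodError throw helper when
//   // throw_no_such_method_error is set.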
4029
4030Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
4031 Fragment keep_same, use_defaults;
4032
4033 if (!function.IsGeneric()) return keep_same;
4034
4035 const auto& default_types =
4036 TypeArguments::ZoneHandle(Z, function.DefaultTypeArguments(Z));
4037
4038 if (default_types.IsNull()) return keep_same;
4039
4040 if (function.IsClosureFunction()) {
4041 // Note that we can't use TranslateInstantiatedTypeArguments here as
4042 // that uses LoadInstantiatorTypeArguments() and LoadFunctionTypeArguments()
4043 // for the instantiator and function type argument vectors, but here we
4044 // load the instantiator and parent function type argument vectors from
4045 // the closure object instead.
4046 LocalVariable* const closure = parsed_function_->ParameterVariable(0);
4047 auto const mode = function.default_type_arguments_instantiation_mode();
4048
4049 switch (mode) {
4050 case InstantiationMode::kIsInstantiated:
4051 use_defaults += Constant(default_types);
4052 break;
4053 case InstantiationMode::kSharesInstantiatorTypeArguments:
4054 use_defaults += LoadLocal(closure);
4055 use_defaults +=
4056 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4057 break;
4058 case InstantiationMode::kSharesFunctionTypeArguments:
4059 use_defaults += LoadLocal(closure);
4060 use_defaults +=
4061 LoadNativeField(Slot::Closure_function_type_arguments());
4062 break;
4063 case InstantiationMode::kNeedsInstantiation:
4064 // Only load the instantiator or function type arguments from the
4065 // closure if they're needed for instantiation.
4066 if (!default_types.IsInstantiated(kCurrentClass)) {
4067 use_defaults += LoadLocal(closure);
4068 use_defaults +=
4069 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4070 } else {
4071 use_defaults += NullConstant();
4072 }
4073 if (!default_types.IsInstantiated(kFunctions)) {
4074 use_defaults += LoadLocal(closure);
4075 use_defaults +=
4076 LoadNativeField(Slot::Closure_function_type_arguments());
4077 } else {
4078 use_defaults += NullConstant();
4079 }
4080 use_defaults += InstantiateTypeArguments(default_types);
4081 break;
4082 }
4083 } else {
4084 use_defaults += TranslateInstantiatedTypeArguments(default_types);
4085 }
4086 use_defaults += StoreLocal(parsed_function_->function_type_arguments());
4087 use_defaults += Drop();
4088
4089 return TestAnyTypeArgs(keep_same, use_defaults);
4090}
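// For illustration (hypothetical example), this makes calls that omit type
// arguments behave as if the default (instantiate-to-bounds) type
// arguments had been passed:
//
//   List<T> single<T extends num>(T x) => [x];
//   dynamic f = single;
//   f(1);  // No type arguments passed: behaves like single<num>(1).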
4091
4092FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
4093 Fragment shared_prologue_linked_in,
4094 Fragment skippable_checks,
4095 Fragment redefinitions_if_skipped,
4096 Fragment body) {
4097 ASSERT(shared_prologue_linked_in.entry == graph_entry_->normal_entry());
4098 ASSERT(parsed_function_->has_entry_points_temp_var());
4099 Instruction* prologue_start = shared_prologue_linked_in.entry->next();
4100
4101 auto* join_entry = BuildJoinEntry();
4102
4103 Fragment normal_entry(shared_prologue_linked_in.entry);
4104 normal_entry +=
4105 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4106 normal_entry += StoreLocal(TokenPosition::kNoSource,
4107 parsed_function_->entry_points_temp_var());
4108 normal_entry += Drop();
4109 normal_entry += Goto(join_entry);
4110
4111 auto* extra_target_entry = BuildFunctionEntry(graph_entry_);
4112 Fragment extra_entry(extra_target_entry);
4113 extra_entry += IntConstant(
4114 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4115 extra_entry += StoreLocal(TokenPosition::kNoSource,
4116 parsed_function_->entry_points_temp_var());
4117 extra_entry += Drop();
4118 extra_entry += Goto(join_entry);
4119
4120 if (prologue_start != nullptr) {
4121 join_entry->LinkTo(prologue_start);
4122 } else {
4123 // Prologue is empty.
4124 shared_prologue_linked_in.current = join_entry;
4125 }
4126
4127 TargetEntryInstr* do_checks;
4128 TargetEntryInstr* skip_checks;
4129 shared_prologue_linked_in +=
4130 LoadLocal(parsed_function_->entry_points_temp_var());
4131 shared_prologue_linked_in += BuildEntryPointsIntrospection();
4132 shared_prologue_linked_in +=
4133 LoadLocal(parsed_function_->entry_points_temp_var());
4134 shared_prologue_linked_in += IntConstant(
4135 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4136 shared_prologue_linked_in +=
4137 BranchIfEqual(&skip_checks, &do_checks, /*negate=*/false);
4138
4139 JoinEntryInstr* rest_entry = BuildJoinEntry();
4140
4141 Fragment(do_checks) + skippable_checks + Goto(rest_entry);
4142 Fragment(skip_checks) + redefinitions_if_skipped + Goto(rest_entry);
4143 Fragment(rest_entry) + body;
4144
4145 return extra_target_entry;
4146}
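// A sketch of the control flow built above (a restatement of this
// function's output, not new behavior):
//
//   normal_entry: style = kNone;               goto join;
//   extra_entry:  style = kSharedWithVariable; goto join;
//   join:   <prologue>
//           if (style == kSharedWithVariable) goto skip; else goto check;
//   check:  <skippable_checks>;         goto rest;
//   skip:   <redefinitions_if_skipped>; goto rest;
//   rest:   <body>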
4147
4148FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
4149 BlockEntryInstr* normal_entry,
4150 Fragment normal_prologue,
4151 Fragment extra_prologue,
4152 Fragment shared_prologue,
4153 Fragment body) {
4154 auto* join_entry = BuildJoinEntry();
4155 auto* extra_entry = BuildFunctionEntry(graph_entry_);
4156
4157 Fragment normal(normal_entry);
4158 normal += IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4159 normal += BuildEntryPointsIntrospection();
4160 normal += normal_prologue;
4161 normal += Goto(join_entry);
4162
4163 Fragment extra(extra_entry);
4164 extra +=
4165 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kSeparate));
4166 extra += BuildEntryPointsIntrospection();
4167 extra += extra_prologue;
4168 extra += Goto(join_entry);
4169
4170 Fragment(join_entry) + shared_prologue + body;
4171 return extra_entry;
4172}
4173
4174FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
4175 const Function& function) {
4176 const Function& parent = Function::ZoneHandle(Z, function.parent_function());
4177 Function& target = Function::ZoneHandle(Z, function.ImplicitClosureTarget(Z));
4178
4179 if (target.IsNull() ||
4180 (parent.num_fixed_parameters() != target.num_fixed_parameters())) {
4181 return BuildGraphOfNoSuchMethodForwarder(function, true,
4182 parent.is_static());
4183 }
4184
4185 graph_entry_ =
4186 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4187
4188 auto normal_entry = BuildFunctionEntry(graph_entry_);
4189 graph_entry_->set_normal_entry(normal_entry);
4190
4191 PrologueInfo prologue_info(-1, -1);
4192 BlockEntryInstr* instruction_cursor =
4193 BuildPrologue(normal_entry, &prologue_info);
4194
4195 Fragment closure(instruction_cursor);
4196 closure += CheckStackOverflowInPrologue(function.token_pos());
4197 closure += BuildDefaultTypeHandling(function);
4198
4199 // For implicit closure functions, any non-covariant checks are either
4200 // performed by the type system or a dynamic invocation layer (dynamic closure
4201 // call dispatcher, mirror, etc.). Static targets never have covariant
4202 // arguments, and for non-static targets, they already perform the covariant
4203 // checks internally. Thus, no checks are needed and we just need to invoke
4204 // the target with the right receiver (unless static).
4205 //
4206 // TODO(dartbug.com/44195): Consider replacing the argument pushes + static
4207 // call with stack manipulation and a tail call instead.
4208
4209 intptr_t type_args_len = 0;
4210 if (function.IsGeneric()) {
4211 if (target.IsConstructor()) {
4212 const auto& result_type = AbstractType::Handle(Z, function.result_type());
4213 ASSERT(result_type.IsFinalized());
4214 // Instantiate a flattened type arguments vector which
4215 // includes type arguments corresponding to superclasses.
4216 // TranslateInstantiatedTypeArguments is smart enough to
4217 // avoid instantiation and reuse passed function type arguments
4218 // if there are no extra type arguments in the flattened vector.
4219 const auto& instantiated_type_arguments = TypeArguments::ZoneHandle(
4220 Z, Type::Cast(result_type).GetInstanceTypeArguments(H.thread()));
4221 closure +=
4222 TranslateInstantiatedTypeArguments(instantiated_type_arguments);
4223 } else {
4224 type_args_len = function.NumTypeParameters();
4225 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4226 closure += LoadLocal(parsed_function_->function_type_arguments());
4227 }
4228 } else if (target.IsFactory()) {
4229 // Factories always take an extra implicit argument for
4230 // type arguments even if their classes don't have type parameters.
4231 closure += NullConstant();
4232 }
4233
4234 // Push receiver.
4235 if (target.IsGenerativeConstructor()) {
4236 const Class& cls = Class::ZoneHandle(Z, target.Owner());
4237 if (cls.NumTypeArguments() > 0) {
4238 if (!function.IsGeneric()) {
4239 closure += Constant(TypeArguments::ZoneHandle(
4240 Z, cls.GetDeclarationInstanceTypeArguments()));
4241 }
4242 closure += AllocateObject(function.token_pos(), cls, 1);
4243 } else {
4244 ASSERT(!function.IsGeneric());
4245 closure += AllocateObject(function.token_pos(), cls, 0);
4246 }
4247 LocalVariable* receiver = MakeTemporary();
4248 closure += LoadLocal(receiver);
4249 } else if (!target.is_static()) {
4250 // The closure context is the receiver.
4251 closure += LoadLocal(parsed_function_->ParameterVariable(0));
4252 closure += LoadNativeField(Slot::Closure_context());
4253 }
4254
4255 closure += PushExplicitParameters(function);
4256
4257 // Forward parameters to the target.
4258 intptr_t argument_count = function.NumParameters() -
4259 function.NumImplicitParameters() +
4260 target.NumImplicitParameters();
4261 ASSERT(argument_count == target.NumParameters());
4262
4263 Array& argument_names =
4264 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4265
4266 closure += StaticCall(function.token_pos(), target, argument_count,
4267 argument_names, ICData::kNoRebind,
4268 /* result_type = */ nullptr, type_args_len);
4269
4270 if (target.IsGenerativeConstructor()) {
4271 // Drop result of constructor invocation, leave receiver
4272 // instance on the stack.
4273 closure += Drop();
4274 }
4275
4276 // Return the result.
4277 closure += Return(function.end_token_pos());
4278
4279 return new (Z)
4280 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4281 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4282}
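// As a hedged Dart-level illustration (hypothetical example), tear-offs
// are implicit closures whose body simply forwards to the torn-off member:
//
//   class C {
//     int add(int x, int y) => x + y;
//   }
//   var f = C().add;  // Implicit closure; the receiver is its context.
//   f(1, 2);          // Forwards to C.add via the static call built above.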
4283
4284FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
4285 const Function& function) {
4286 ASSERT(function.IsImplicitGetterOrSetter() ||
4287 function.IsDynamicInvocationForwarder());
4288
4289 // Instead of building a dynamic invocation forwarder that checks argument
4290 // types and then invokes the original setter, we simply generate the type
4291 // check and an inlined field store. The scope builder takes care of setting
4292 // the correct type check mode in this case.
4293 const auto& target = Function::Handle(
4294 Z, function.IsDynamicInvocationForwarder() ? function.ForwardingTarget()
4295 : function.ptr());
4296 ASSERT(target.IsImplicitGetterOrSetter());
4297
4298 const bool is_method = !function.IsStaticFunction();
4299 const bool is_setter = target.IsImplicitSetterFunction();
4300 const bool is_getter = target.IsImplicitGetterFunction() ||
4301 target.IsImplicitStaticGetterFunction();
4302 ASSERT(is_setter || is_getter);
4303
4304 const auto& field = Field::ZoneHandle(Z, target.accessor_field());
4305
4306 graph_entry_ =
4307 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4308
4309 auto normal_entry = BuildFunctionEntry(graph_entry_);
4310 graph_entry_->set_normal_entry(normal_entry);
4311
4312 Fragment body(normal_entry);
4313 if (is_setter) {
4314 auto const setter_value =
4315 parsed_function_->ParameterVariable(is_method ? 1 : 0);
4316 if (is_method) {
4317 body += LoadLocal(parsed_function_->ParameterVariable(0));
4318 }
4319 body += LoadLocal(setter_value);
4320
4321 // Normally the dyn:* forwarder only has to check the parameters that the
4322 // actual target will not check itself.
4323 // However, here we manually inline the target, so the dyn:* forwarder has
4324 // to check all parameters.
4325 const bool needs_type_check = function.IsDynamicInvocationForwarder() ||
4326 setter_value->needs_type_check();
4327 if (needs_type_check) {
4328 body += CheckAssignable(setter_value->static_type(), setter_value->name(),
4329 AssertAssignableInstr::kParameterCheck,
4330 field.token_pos());
4331 }
4332 if (field.is_late()) {
4333 if (is_method) {
4334 body += Drop();
4335 }
4336 body += Drop();
4337 body += StoreLateField(
4338 field, is_method ? parsed_function_->ParameterVariable(0) : nullptr,
4339 setter_value);
4340 } else {
4341 if (is_method) {
4342 body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
4343 } else {
4344 body += StoreStaticField(TokenPosition::kNoSource, field);
4345 }
4346 }
4347 body += NullConstant();
4348 } else {
4349 ASSERT(is_getter);
4350 if (is_method) {
4351 body += LoadLocal(parsed_function_->ParameterVariable(0));
4352 body += LoadField(
4353 field, /*calls_initializer=*/field.NeedsInitializationCheckOnLoad());
4354 } else if (field.is_const()) {
4355 const auto& value = Object::Handle(Z, field.StaticConstFieldValue());
4356 if (value.IsError()) {
4357 Report::LongJump(Error::Cast(value));
4358 }
4359 body += Constant(Instance::ZoneHandle(Z, Instance::RawCast(value.ptr())));
4360 } else {
4361 // Static fields
4362 // - with trivial initializer
4363 // - without initializer if they are not late
4364 // are initialized eagerly and do not have implicit getters.
4365 // Static fields with a non-trivial initializer need a getter to perform
4366 // lazy initialization. Late fields without an initializer need a getter
4367 // to make sure they are already initialized.
4368 ASSERT(field.has_nontrivial_initializer() ||
4369 (field.is_late() && !field.has_initializer()));
4370 body += LoadStaticField(field, /*calls_initializer=*/true);
4371 }
4372
4373 if (is_method || !field.is_const()) {
4374#if defined(PRODUCT)
4375 RELEASE_ASSERT(!field.needs_load_guard());
4376#else
4377 // Always build fragment for load guard to maintain stable deopt_id
4378 // numbering, but link it into the graph only if field actually
4379 // needs load guard.
4380 Fragment load_guard = CheckAssignable(
4381 AbstractType::Handle(Z, field.type()), Symbols::FunctionResult());
4382 if (field.needs_load_guard()) {
4383 ASSERT(IG->HasAttemptedReload());
4384 body += load_guard;
4385 }
4386#endif
4387 }
4388 }
4389 body += Return(TokenPosition::kNoSource);
4390
4391 PrologueInfo prologue_info(-1, -1);
4392 return new (Z)
4393 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4394 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4395}
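// For illustration (hypothetical example), the graphs built here
// correspond roughly to the accessors of a plain field, with the setter's
// type check emitted only when needed (e.g. for dyn:* forwarders):
//
//   class C {
//     int x = 0;  // Implicit getter get:x (field load + return) and
//                 // implicit setter set:x (optional check + field store).
//   }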
4396
4397FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
4398 const Function& function) {
4399 auto& name = String::Handle(Z, function.name());
4400 name = Function::DemangleDynamicInvocationForwarderName(name);
4401 const auto& target = Function::ZoneHandle(Z, function.ForwardingTarget());
4402 ASSERT(!target.IsNull());
4403
4404 if (target.IsImplicitSetterFunction() || target.IsImplicitGetterFunction()) {
4405 return BuildGraphOfFieldAccessor(function);
4406 }
4407 if (target.IsMethodExtractor()) {
4408 return BuildGraphOfMethodExtractor(target);
4409 }
4410 if (IsRecognizedMethodForFlowGraph(function)) {
4411 return BuildGraphOfRecognizedMethod(function);
4412 }
4413
4414 graph_entry_ = new (Z) GraphEntryInstr(*parsed_function_, osr_id_);
4415
4416 auto normal_entry = BuildFunctionEntry(graph_entry_);
4417 graph_entry_->set_normal_entry(normal_entry);
4418
4419 PrologueInfo prologue_info(-1, -1);
4420 auto instruction_cursor = BuildPrologue(normal_entry, &prologue_info);
4421
4422 Fragment body;
4423 if (!function.is_native()) {
4424 body += CheckStackOverflowInPrologue(function.token_pos());
4425 }
4426
4427 ASSERT(parsed_function_->scope()->num_context_variables() == 0);
4428
4429 // Should never build a dynamic invocation forwarder for the equality
4430 // operator.
4431 ASSERT(function.name() != Symbols::EqualOperator().ptr());
4432
4433 // Even if the caller did not pass an argument vector we would still
4434 // call the target with instantiate-to-bounds type arguments.
4435 body += BuildDefaultTypeHandling(function);
4436
4437 // Build argument type checks that complement those that are emitted in the
4438 // target.
4439 BuildTypeArgumentTypeChecks(
4440 TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds, &body);
4441 BuildArgumentTypeChecks(&body, &body, nullptr);
4442
4443 // Push all arguments and invoke the original method.
4444
4445 intptr_t type_args_len = 0;
4446 if (function.IsGeneric()) {
4447 type_args_len = function.NumTypeParameters();
4448 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4449 body += LoadLocal(parsed_function_->function_type_arguments());
4450 }
4451
4452 // Push receiver.
4453 ASSERT(function.NumImplicitParameters() == 1);
4454 body += LoadLocal(parsed_function_->receiver_var());
4455 body += PushExplicitParameters(function, target);
4456
4457 const intptr_t argument_count = function.NumParameters();
4458 const auto& argument_names =
4459 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4460
4461 body += StaticCall(TokenPosition::kNoSource, target, argument_count,
4462 argument_names, ICData::kNoRebind, nullptr, type_args_len);
4463
4464 if (target.has_unboxed_integer_return()) {
4465 body += Box(kUnboxedInt64);
4466 } else if (target.has_unboxed_double_return()) {
4467 body += Box(kUnboxedDouble);
4468 } else if (target.has_unboxed_record_return()) {
4469 // Handled in SelectRepresentations pass in optimized mode.
4470 ASSERT(optimizing_);
4471 }
4472
4473 // Later optimization passes assume that the result of a x.[]=(...) call is
4474 // not used. We must guarantee this invariant because a violation will lead
4475 // to illegal IL once we replace x.[]=(...) with a sequence that does not
4476 // actually produce any value. See http://dartbug.com/29135 for more details.
4477 if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
4478 body += Drop();
4479 body += NullConstant();
4480 }
4481
4482 body += Return(TokenPosition::kNoSource);
4483
4484 instruction_cursor->LinkTo(body.entry);
4485
4486 // When compiling for OSR, use a depth first search to find the OSR
4487 // entry and make graph entry jump to it instead of normal entry.
4488 // Catch entries are always considered reachable, even if they
4489 // become unreachable after OSR.
4490 if (IsCompiledForOsr()) {
4491 graph_entry_->RelinkToOsrEntry(Z, last_used_block_id_ + 1);
4492 }
4493 return new (Z)
4494 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4495 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4496}
4497
4498void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
4499 const Fragment& fragment,
4500 int64_t min,
4501 int64_t max) {
4502 ASSERT(fragment.current->IsDefinition());
4503 Range range(RangeBoundary::FromConstant(min),
4504 RangeBoundary::FromConstant(max));
4505 fragment.current->AsDefinition()->set_range(range);
4506}
4507
4508static classid_t TypedDataCidUnboxed(Representation unboxed_representation) {
4509 switch (unboxed_representation) {
4510 case kUnboxedFloat:
4511 // Note kTypedDataFloat32ArrayCid loads kUnboxedDouble.
4512 UNREACHABLE();
4513 return kTypedDataFloat32ArrayCid;
4514 case kUnboxedInt32:
4515 return kTypedDataInt32ArrayCid;
4516 case kUnboxedUint32:
4517 return kTypedDataUint32ArrayCid;
4518 case kUnboxedInt64:
4519 return kTypedDataInt64ArrayCid;
4520 case kUnboxedDouble:
4521 return kTypedDataFloat64ArrayCid;
4522 default:
4523 UNREACHABLE();
4524 }
4525 UNREACHABLE();
4526}
4527
4528Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
4529 Representation unboxed_representation,
4530 intptr_t index_scale,
4531 bool index_unboxed) {
4532 ASSERT(unboxed_representation == kUnboxedInt32 ||
4533 unboxed_representation == kUnboxedUint32 ||
4534 unboxed_representation == kUnboxedInt64 ||
4535 unboxed_representation == kUnboxedFloat ||
4536 unboxed_representation == kUnboxedDouble);
4537 Fragment fragment;
4538 if (unboxed_representation == kUnboxedFloat) {
4539 fragment += BitCast(kUnboxedFloat, kUnboxedInt32);
4540 unboxed_representation = kUnboxedInt32;
4541 }
4542 fragment += StoreIndexedTypedData(TypedDataCidUnboxed(unboxed_representation),
4543 index_scale, index_unboxed);
4544 return fragment;
4545}
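//
// Example: for kUnboxedFloat the fragment built above is effectively
//
//   BitCast(kUnboxedFloat, kUnboxedInt32)
//   StoreIndexedTypedData(kTypedDataInt32ArrayCid, /*index_scale=*/1, ...)
//
// because Float32 typed data is handled via kUnboxedDouble, so the float
// bits are moved through the Int32 view instead.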
4546
4547Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
4548 Representation unboxed_representation,
4549 intptr_t index_scale,
4550 bool index_unboxed) {
4551 ASSERT(unboxed_representation == kUnboxedInt32 ||
4552 unboxed_representation == kUnboxedUint32 ||
4553 unboxed_representation == kUnboxedInt64 ||
4554 unboxed_representation == kUnboxedFloat ||
4555 unboxed_representation == kUnboxedDouble);
4556 Representation representation_for_load = unboxed_representation;
4557 if (unboxed_representation == kUnboxedFloat) {
4558 representation_for_load = kUnboxedInt32;
4559 }
4560 Fragment fragment;
4561 fragment += LoadIndexed(TypedDataCidUnboxed(representation_for_load),
4562 index_scale, index_unboxed);
4563 if (unboxed_representation == kUnboxedFloat) {
4564 fragment += BitCast(kUnboxedInt32, kUnboxedFloat);
4565 }
4566 return fragment;
4567}
4568
4569Fragment FlowGraphBuilder::UnhandledException() {
4570 const auto class_table = thread_->isolate_group()->class_table();
4571 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
4572 const auto& klass =
4573 Class::ZoneHandle(H.zone(), class_table->At(kUnhandledExceptionCid));
4574 ASSERT(!klass.IsNull());
4575 Fragment body;
4576 body += AllocateObject(TokenPosition::kNoSource, klass, 0);
4577 LocalVariable* error_instance = MakeTemporary();
4578
4579 body += LoadLocal(error_instance);
4580 body += LoadLocal(CurrentException());
4581 body +=
4582 StoreNativeField(Slot::UnhandledException_exception(),
4583 StoreFieldInstr::Kind::kInitializing);
4584
4585 body += LoadLocal(error_instance);
4586 body += LoadLocal(CurrentStackTrace());
4587 body +=
4588 StoreNativeField(Slot::UnhandledException_stacktrace(),
4589 StoreFieldInstr::Kind::kInitializing);
4590
4591 return body;
4592}
4593
4594Fragment FlowGraphBuilder::UnboxTruncate(Representation to) {
4595 auto const unbox_to = to == kUnboxedFloat ? kUnboxedDouble : to;
4596 Fragment instructions;
4597 auto* unbox = UnboxInstr::Create(unbox_to, Pop(), DeoptId::kNone,
4598 Instruction::SpeculativeMode::kNotSpeculative);
4599 instructions <<= unbox;
4600 Push(unbox);
4601 if (to == kUnboxedFloat) {
4602 instructions += DoubleToFloat();
4603 }
4604 return instructions;
4605}
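//
// Example: UnboxTruncate(kUnboxedFloat) cannot unbox a boxed Dart double
// directly to a 32-bit float; the fragment above unboxes to kUnboxedDouble
// first and then appends DoubleToFloat() to narrow the value.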
4606
4607Fragment FlowGraphBuilder::LoadThread() {
4608 LoadThreadInstr* instr = new (Z) LoadThreadInstr();
4609 Push(instr);
4610 return Fragment(instr);
4611}
4612
4613Fragment FlowGraphBuilder::LoadIsolate() {
4614 Fragment body;
4615 body += LoadThread();
4616 body += LoadNativeField(Slot::Thread_isolate());
4617 return body;
4618}
4619
4620Fragment FlowGraphBuilder::LoadIsolateGroup() {
4621 Fragment body;
4622 body += LoadThread();
4623 body += LoadNativeField(Slot::Thread_isolate_group());
4624 return body;
4625}
4626
4627Fragment FlowGraphBuilder::LoadObjectStore() {
4628 Fragment body;
4629 body += LoadIsolateGroup();
4630 body += LoadNativeField(Slot::IsolateGroup_object_store());
4631 return body;
4632}
4633
4634Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
4635 Fragment body;
4636 body += LoadThread();
4637 body += LoadNativeField(Slot::Thread_service_extension_stream());
4638 return body;
4639}
4640
4641// TODO(http://dartbug.com/47487): Support unboxed output value.
4642Fragment FlowGraphBuilder::BoolToInt() {
4643 // TODO(http://dartbug.com/36855) Build IfThenElseInstr, instead of letting
4644 // the optimizer turn this into that.
4645
4646 LocalVariable* expression_temp = parsed_function_->expression_temp_var();
4647
4648 Fragment instructions;
4649 TargetEntryInstr* is_true;
4650 TargetEntryInstr* is_false;
4651
4652 instructions += BranchIfTrue(&is_true, &is_false);
4653 JoinEntryInstr* join = BuildJoinEntry();
4654
4655 {
4656 Fragment store_1(is_true);
4657 store_1 += IntConstant(1);
4658 store_1 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4659 store_1 += Drop();
4660 store_1 += Goto(join);
4661 }
4662
4663 {
4664 Fragment store_0(is_false);
4665 store_0 += IntConstant(0);
4666 store_0 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4667 store_0 += Drop();
4668 store_0 += Goto(join);
4669 }
4670
4671 instructions = Fragment(instructions.entry, join);
4672 instructions += LoadLocal(expression_temp);
4673 return instructions;
4674}
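//
// The fragment above builds a small diamond in the flow graph; in Dart
// terms it evaluates `b ? 1 : 0` through expression_temp:
//
//           BranchIfTrue(b)
//            /          \
//     temp = 1        temp = 0
//            \          /
//         Join; LoadLocal(temp)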
4675
4676Fragment FlowGraphBuilder::IntToBool() {
4677 Fragment body;
4678 body += IntConstant(0);
4679 body += StrictCompare(Token::kNE_STRICT);
4680 return body;
4681}
4682
4683Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
4684 Token::Kind kind) {
4685 if (CompilerState::Current().is_aot()) {
4686 Value* right = Pop();
4687 Value* left = Pop();
4688 RelationalOpInstr* instr = new (Z) RelationalOpInstr(
4689 InstructionSource(position), kind, left, right, kMintCid,
4690 GetNextDeoptId(), Instruction::SpeculativeMode::kNotSpeculative);
4691 Push(instr);
4692 return Fragment(instr);
4693 }
4694 const String* name = nullptr;
4695 switch (kind) {
4696 case Token::kLT:
4697 name = &Symbols::LAngleBracket();
4698 break;
4699 case Token::kGT:
4700 name = &Symbols::RAngleBracket();
4701 break;
4702 case Token::kLTE:
4703 name = &Symbols::LessEqualOperator();
4704 break;
4705 case Token::kGTE:
4706 name = &Symbols::GreaterEqualOperator();
4707 break;
4708 default:
4709 UNREACHABLE();
4710 }
4711 return InstanceCall(
4712 position, *name, kind, /*type_args_len=*/0, /*argument_count=*/2,
4713 /*argument_names=*/Array::null_array(), /*checked_argument_count=*/2);
4714}
4715
4716Fragment FlowGraphBuilder::NativeReturn(
4717 const compiler::ffi::CallbackMarshaller& marshaller) {
4718 const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
4719 if (num_return_defs == 1) {
4720 auto* instr = new (Z) NativeReturnInstr(Pop(), marshaller);
4721 return Fragment(instr).closed();
4722 }
4723 ASSERT_EQUAL(num_return_defs, 2);
4724 auto* offset = Pop();
4725 auto* typed_data_base = Pop();
4726 auto* instr = new (Z) NativeReturnInstr(typed_data_base, offset, marshaller);
4727 return Fragment(instr).closed();
4728}
4729
4730Fragment FlowGraphBuilder::BitCast(Representation from, Representation to) {
4731 BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
4732 Push(instr);
4733 return Fragment(instr);
4734}
4735
4736Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
4737 Call1ArgStubInstr::StubId stub_id) {
4738 Call1ArgStubInstr* instr = new (Z) Call1ArgStubInstr(
4739 InstructionSource(position), stub_id, Pop(), GetNextDeoptId());
4740 Push(instr);
4741 return Fragment(instr);
4742}
4743
4744Fragment FlowGraphBuilder::Suspend(TokenPosition position,
4745 SuspendInstr::StubId stub_id) {
4746 Value* type_args =
4747 (stub_id == SuspendInstr::StubId::kAwaitWithTypeCheck) ? Pop() : nullptr;
4748 Value* operand = Pop();
4749 SuspendInstr* instr =
4750 new (Z) SuspendInstr(InstructionSource(position), stub_id, operand,
4751 type_args, GetNextDeoptId(), GetNextDeoptId());
4752 Push(instr);
4753 return Fragment(instr);
4754}
4755
4756Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
4757 const AbstractType& compound_type) {
4758 const auto& compound_sub_class =
4759 Class::ZoneHandle(Z, compound_type.type_class());
4760 compound_sub_class.EnsureIsFinalized(thread_);
4761
4762 auto& state = thread_->compiler_state();
4763
4764 Fragment body;
4765 LocalVariable* typed_data = MakeTemporary("typed_data_base");
4766 body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
4767 LocalVariable* compound = MakeTemporary("compound");
4768 body += LoadLocal(compound);
4769 body += LoadLocal(typed_data);
4770 body += StoreField(state.CompoundTypedDataBaseField(),
4771 StoreFieldInstr::Kind::kInitializing);
4772 body += LoadLocal(compound);
4773 body += IntConstant(0);
4774 body += StoreField(state.CompoundOffsetInBytesField(),
4775 StoreFieldInstr::Kind::kInitializing);
4776 body += DropTempsPreserveTop(1); // Drop TypedData.
4777 return body;
4778}
4779
4780Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {
4781 Fragment body;
4782 auto& state = thread_->compiler_state();
4783 body += LoadField(state.CompoundTypedDataBaseField(),
4784 /*calls_initializer=*/false);
4785 return body;
4786}
4787
4788Fragment FlowGraphBuilder::LoadOffsetInBytesFromCompound() {
4789 Fragment body;
4790 auto& state = thread_->compiler_state();
4791 body += LoadField(state.CompoundOffsetInBytesField(),
4792 /*calls_initializer=*/false);
4793 return body;
4794}
4795
4796Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
4797 ZoneGrowableArray<LocalVariable*>* definitions,
4798 const GrowableArray<Representation>& representations) {
4799 Fragment body;
4800 const intptr_t num_defs = representations.length();
4801 ASSERT(definitions->length() == num_defs);
4802
4803 LocalVariable* uint8_list = MakeTemporary("uint8_list");
4804 int offset_in_bytes = 0;
4805 for (intptr_t i = 0; i < num_defs; i++) {
4806 const Representation representation = representations[i];
4807 body += LoadLocal(uint8_list);
4808 body += IntConstant(offset_in_bytes);
4809 body += LoadLocal(definitions->At(i));
4810 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4811 /*index_unboxed=*/false);
4812 offset_in_bytes += RepresentationUtils::ValueSize(representation);
4813 }
4814 body += DropTempsPreserveTop(num_defs); // Drop chunk defs, keep TypedData.
4815 return body;
4816}
4817
4818static intptr_t chunk_size(intptr_t bytes_left) {
4819 ASSERT(bytes_left >= 1);
4820 if (bytes_left >= 8 && compiler::target::kWordSize == 8) {
4821 return 8;
4822 }
4823 if (bytes_left >= 4) {
4824 return 4;
4825 }
4826 if (bytes_left >= 2) {
4827 return 2;
4828 }
4829 return 1;
4830}
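//
// Worked example: a 7-byte payload is chunked by successive calls with
// bytes_left = 7, 3, 1, yielding 4-, 2- and 1-byte chunks on both 32-bit
// and 64-bit targets; an 8-byte payload moves as a single chunk only when
// compiler::target::kWordSize == 8.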
4831
4832 static classid_t typed_data_cid(intptr_t chunk_size) {
4833 switch (chunk_size) {
4834 case 8:
4835 return kTypedDataInt64ArrayCid;
4836 case 4:
4837 return kTypedDataInt32ArrayCid;
4838 case 2:
4839 return kTypedDataInt16ArrayCid;
4840 case 1:
4841 return kTypedDataInt8ArrayCid;
4842 }
4843 UNREACHABLE();
4844}
4845
4846// Only for use within FfiCallbackConvertCompoundArgumentToDart and
4847// FfiCallbackConvertCompoundReturnToNative, where we know the "array" being
4848// passed is an untagged pointer coming from C.
4849 static classid_t external_typed_data_cid(intptr_t chunk_size) {
4850 switch (chunk_size) {
4851 case 8:
4852 return kExternalTypedDataInt64ArrayCid;
4853 case 4:
4854 return kExternalTypedDataInt32ArrayCid;
4855 case 2:
4856 return kExternalTypedDataInt16ArrayCid;
4857 case 1:
4858 return kExternalTypedDataInt8ArrayCid;
4859 }
4860 UNREACHABLE();
4861}
4862
4863Fragment FlowGraphBuilder::LoadTail(LocalVariable* variable,
4864 intptr_t size,
4865 intptr_t offset_in_bytes,
4866 Representation representation) {
4867 Fragment body;
4868 if (size == 8 || size == 4) {
4869 body += LoadLocal(variable);
4870 body += LoadTypedDataBaseFromCompound();
4871 body += LoadLocal(variable);
4872 body += LoadOffsetInBytesFromCompound();
4873 body += IntConstant(offset_in_bytes);
4874 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4875 body += LoadIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4876 /*index_unboxed=*/false);
4877 return body;
4878 }
4879 ASSERT(representation != kUnboxedFloat);
4880 ASSERT(representation != kUnboxedDouble);
4881 intptr_t shift = 0;
4882 intptr_t remaining = size;
4883 auto step = [&](intptr_t part_bytes, intptr_t part_cid) {
4884 while (remaining >= part_bytes) {
4885 body += LoadLocal(variable);
4886 body += LoadTypedDataBaseFromCompound();
4887 body += LoadLocal(variable);
4888 body += LoadOffsetInBytesFromCompound();
4889 body += IntConstant(offset_in_bytes);
4890 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4891 body += LoadIndexed(part_cid, /*index_scale*/ 1,
4892 /*index_unboxed=*/false);
4893 if (shift != 0) {
4894 body += IntConstant(shift);
4895 // 64-bit doesn't support kUnboxedInt32 ops.
4896 Representation op_representation = kUnboxedIntPtr;
4897 body += BinaryIntegerOp(Token::kSHL, op_representation,
4898 /*is_truncating*/ true);
4899 body += BinaryIntegerOp(Token::kBIT_OR, op_representation,
4900 /*is_truncating*/ true);
4901 }
4902 offset_in_bytes += part_bytes;
4903 remaining -= part_bytes;
4904 shift += part_bytes * kBitsPerByte;
4905 }
4906 };
4907 step(8, kTypedDataUint64ArrayCid);
4908 step(4, kTypedDataUint32ArrayCid);
4909 step(2, kTypedDataUint16ArrayCid);
4910 step(1, kTypedDataUint8ArrayCid);
4911
4912 // Sigh, LoadIndexed's representation for int8/16 is [u]int64, but the
4913 // FfiCall wants an [u]int32 input. Manually insert a "truncating"
4914 // conversion so one isn't automatically added that thinks it can deopt.
4915 Representation from_representation = Peek(0)->representation();
4916 if (from_representation != representation) {
4917 IntConverterInstr* convert = new IntConverterInstr(
4918 from_representation, representation, Pop(), DeoptId::kNone);
4919 convert->mark_truncating();
4920 Push(convert);
4921 body <<= convert;
4922 }
4923
4924 return body;
4925}
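//
// Worked example: on a little-endian target a 3-byte tail skips the 8- and
// 4-byte steps, loads a uint16 at offset_in_bytes and a uint8 two bytes
// later, and combines them as
//
//   value = u16 | (u8 << 16)
//
// with the shift growing by part_bytes * kBitsPerByte per loaded part.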
4926
4927Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
4928 LocalVariable* variable,
4929 const compiler::ffi::BaseMarshaller& marshaller,
4930 intptr_t arg_index) {
4931 Fragment body;
4932 const auto& native_loc = marshaller.Location(arg_index);
4933 if (native_loc.IsMultiple()) {
4934 const auto& multiple_loc = native_loc.AsMultiple();
4935 intptr_t offset_in_bytes = 0;
4936 for (intptr_t i = 0; i < multiple_loc.locations().length(); i++) {
4937 const auto& loc = *multiple_loc.locations()[i];
4938 Representation representation;
4939 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
4940 // IL can only pass integers to integer Locations, so pass as integer if
4941 // the Location requires it to be an integer.
4942 representation = loc.container_type().AsRepresentationOverApprox(Z);
4943 } else {
4944 // Representations do not support 8- or 16-bit ints; over-approximate
4945 // to 32 bits.
4946 representation = loc.payload_type().AsRepresentationOverApprox(Z);
4947 }
4948 intptr_t size = loc.payload_type().SizeInBytes();
4949 body += LoadTail(variable, size, offset_in_bytes, representation);
4950 offset_in_bytes += size;
4951 }
4952 } else if (native_loc.IsStack()) {
4953 // Break struct in pieces to separate IL definitions to pass those
4954 // separate definitions into the FFI call.
4955 Representation representation = kUnboxedWord;
4956 intptr_t remaining = native_loc.payload_type().SizeInBytes();
4957 intptr_t offset_in_bytes = 0;
4958 while (remaining >= compiler::target::kWordSize) {
4959 body += LoadTail(variable, compiler::target::kWordSize, offset_in_bytes,
4960 representation);
4961 offset_in_bytes += compiler::target::kWordSize;
4962 remaining -= compiler::target::kWordSize;
4963 }
4964 if (remaining > 0) {
4965 body += LoadTail(variable, remaining, offset_in_bytes, representation);
4966 }
4967 } else {
4968 ASSERT(native_loc.IsPointerToMemory());
4969 // Only load the typed data, do copying in the FFI call machine code.
4970 body += LoadLocal(variable); // User-defined struct.
4971 body += LoadTypedDataBaseFromCompound();
4972 body += LoadLocal(variable); // User-defined struct.
4973 body += LoadOffsetInBytesFromCompound();
4974 body += UnboxTruncate(kUnboxedWord);
4975 }
4976 return body;
4977}
4978
4979Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
4980 const compiler::ffi::BaseMarshaller& marshaller,
4981 intptr_t arg_index) {
4982 Fragment body;
4983 // The typed data is allocated before the FFI call, and is populated in
4984 // machine code. So, here, it only has to be wrapped in the struct class.
4985 const auto& compound_type =
4986 AbstractType::Handle(Z, marshaller.CType(arg_index));
4987 body += WrapTypedDataBaseInCompound(compound_type);
4988 return body;
4989}
4990
4991Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
4992 const compiler::ffi::BaseMarshaller& marshaller,
4993 intptr_t arg_index,
4994 ZoneGrowableArray<LocalVariable*>* definitions) {
4995 const intptr_t length_in_bytes =
4996 marshaller.Location(arg_index).payload_type().SizeInBytes();
4997
4998 Fragment body;
4999 if (marshaller.Location(arg_index).IsMultiple()) {
5000 body += IntConstant(length_in_bytes);
5001 body +=
5002 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5003 LocalVariable* uint8_list = MakeTemporary("uint8_list");
5004
5005 const auto& multiple_loc = marshaller.Location(arg_index).AsMultiple();
5006 const intptr_t num_defs = multiple_loc.locations().length();
5007 intptr_t offset_in_bytes = 0;
5008 for (intptr_t i = 0; i < num_defs; i++) {
5009 const auto& loc = *multiple_loc.locations()[i];
5010 Representation representation;
5011 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
5012 // IL can only pass integers to integer Locations, so pass as integer if
5013 // the Location requires it to be an integer.
5014 representation = loc.container_type().AsRepresentationOverApprox(Z);
5015 } else {
5016 // Representations do not support 8- or 16-bit ints; over-approximate
5017 // to 32 bits.
5018 representation = loc.payload_type().AsRepresentationOverApprox(Z);
5019 }
5020 body += LoadLocal(uint8_list);
5021 body += IntConstant(offset_in_bytes);
5022 body += LoadLocal(definitions->At(i));
5023 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
5024 /*index_unboxed=*/false);
5025 offset_in_bytes += loc.payload_type().SizeInBytes();
5026 }
5027
5028 body += DropTempsPreserveTop(num_defs); // Drop chunk defs, keep TypedData.
5029 } else if (marshaller.Location(arg_index).IsStack()) {
5030 // Allocate and populate a TypedData from the individual NativeParameters.
5031 body += IntConstant(length_in_bytes);
5032 body +=
5033 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5034 GrowableArray<Representation> representations;
5035 marshaller.RepsInFfiCall(arg_index, &representations);
5036 body += PopFromStackToTypedDataBase(definitions, representations);
5037 } else {
5038 ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
5039 // Allocate a TypedData and copy contents pointed to by an address into it.
5040 LocalVariable* address_of_compound = MakeTemporary("address_of_compound");
5041 body += IntConstant(length_in_bytes);
5042 body +=
5043 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5044 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5045 intptr_t offset_in_bytes = 0;
5046 while (offset_in_bytes < length_in_bytes) {
5047 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5048 const intptr_t chunk_sizee = chunk_size(bytes_left);
5049
5050 body += LoadLocal(address_of_compound);
5051 body += IntConstant(offset_in_bytes);
5052 body +=
5053 LoadIndexed(external_typed_data_cid(chunk_sizee), /*index_scale=*/1,
5054 /*index_unboxed=*/false);
5055 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5056
5057 body += LoadLocal(typed_data_base);
5058 body += IntConstant(offset_in_bytes);
5059 body += LoadLocal(chunk_value);
5060 body += StoreIndexedTypedData(typed_data_cid(chunk_sizee),
5061 /*index_scale=*/1,
5062 /*index_unboxed=*/false);
5063 body += DropTemporary(&chunk_value);
5064
5065 offset_in_bytes += chunk_sizee;
5066 }
5067 ASSERT(offset_in_bytes == length_in_bytes);
5068 body += DropTempsPreserveTop(1); // Drop address_of_compound.
5069 }
5070 // Wrap typed data in compound class.
5071 const auto& compound_type =
5072 AbstractType::Handle(Z, marshaller.CType(arg_index));
5073 body += WrapTypedDataBaseInCompound(compound_type);
5074 return body;
5075}
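//
// Worked example for the PointerToMemory branch above: a 12-byte struct on
// a 64-bit target is copied as an 8-byte chunk followed by a 4-byte chunk;
// each chunk is read from C memory via external_typed_data_cid() and
// written into the fresh Uint8List via the matching typed_data_cid().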
5076
5077Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
5078 const compiler::ffi::CallbackMarshaller& marshaller,
5079 intptr_t arg_index) {
5080 Fragment body;
5081 const auto& native_loc = marshaller.Location(arg_index);
5082 if (native_loc.IsMultiple()) {
5083 // Pass in typed data and offset to native return instruction, and do the
5084 // copying in machine code.
5085 LocalVariable* compound = MakeTemporary("compound");
5086 body += LoadLocal(compound);
5087 body += LoadOffsetInBytesFromCompound();
5088 body += UnboxTruncate(kUnboxedWord);
5089 body += StoreLocal(TokenPosition::kNoSource,
5090 parsed_function_->expression_temp_var());
5091 body += Drop();
5092 body += LoadTypedDataBaseFromCompound();
5093 body += LoadLocal(parsed_function_->expression_temp_var());
5094 } else {
5095 ASSERT(native_loc.IsPointerToMemory());
5096 // We copy the data into the right location in IL.
5097 const intptr_t length_in_bytes =
5098 marshaller.Location(arg_index).payload_type().SizeInBytes();
5099
5100 LocalVariable* compound = MakeTemporary("compound");
5101 body += LoadLocal(compound);
5102 body += LoadTypedDataBaseFromCompound();
5103 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5104 body += LoadLocal(compound);
5105 body += LoadOffsetInBytesFromCompound();
5106 LocalVariable* offset = MakeTemporary("offset");
5107
5108 auto* pointer_to_return =
5109 new (Z) NativeParameterInstr(marshaller, compiler::ffi::kResultIndex);
5110 Push(pointer_to_return); // Address where return value should be stored.
5111 body <<= pointer_to_return;
5112 LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
5113
5114 intptr_t offset_in_bytes = 0;
5115 while (offset_in_bytes < length_in_bytes) {
5116 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5117 const intptr_t chunk_sizee = chunk_size(bytes_left);
5118
5119 body += LoadLocal(typed_data_base);
5120 body += LoadLocal(offset);
5121 body += IntConstant(offset_in_bytes);
5122 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
5123 body += LoadIndexed(typed_data_cid(chunk_sizee), /*index_scale=*/1,
5124 /*index_unboxed=*/false);
5125 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5126
5127 body += LoadLocal(unboxed_address);
5128 body += IntConstant(offset_in_bytes);
5129 body += LoadLocal(chunk_value);
5130 body += StoreIndexedTypedData(external_typed_data_cid(chunk_sizee),
5131 /*index_scale=*/1,
5132 /*index_unboxed=*/false);
5133 body += DropTemporary(&chunk_value);
5134
5135 offset_in_bytes += chunk_sizee;
5136 }
5137
5138 ASSERT(offset_in_bytes == length_in_bytes);
5139 body += DropTempsPreserveTop(3);
5140 }
5141 return body;
5142}
5143
5144Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
5145 const compiler::ffi::BaseMarshaller& marshaller,
5146 intptr_t arg_index) {
5147 ASSERT(!marshaller.IsCompoundCType(arg_index));
5148
5149 Fragment body;
5150 if (marshaller.IsPointerPointer(arg_index)) {
5151 Class& result_class =
5152 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
5153 // This class might only be instantiated as a return type of ffi calls.
5154 result_class.EnsureIsFinalized(thread_);
5155
5156 TypeArguments& args =
5157 TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());
5158
5159 // A kernel transform for FFI in the front-end ensures that type parameters
5160 // do not appear in the type arguments to any Pointer classes in an FFI
5161 // signature.
5162 ASSERT(args.IsNull() || args.IsInstantiated());
5163 args = args.Canonicalize(thread_);
5164
5165 LocalVariable* address = MakeTemporary("address");
5166 LocalVariable* result = parsed_function_->expression_temp_var();
5167
5168 body += Constant(args);
5169 body += AllocateObject(TokenPosition::kNoSource, result_class, 1);
5170 body += StoreLocal(TokenPosition::kNoSource, result);
5171 body += LoadLocal(address);
5172 body += StoreNativeField(Slot::PointerBase_data(),
5173 InnerPointerAccess::kCannotBeInnerPointer,
5174 StoreFieldInstr::Kind::kInitializing);
5175 body += DropTemporary(&address); // address
5176 body += LoadLocal(result);
5177 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5178 UNREACHABLE(); // Only supported for FFI call arguments.
5179 } else if (marshaller.IsCompoundPointer(arg_index)) {
5180 UNREACHABLE(); // Only supported for FFI call arguments.
5181 } else if (marshaller.IsHandleCType(arg_index)) {
5182 // The top of the stack is a Dart_Handle, so retrieve the tagged pointer
5183 // out of it.
5184 body += LoadNativeField(Slot::LocalHandle_ptr());
5185 } else if (marshaller.IsVoid(arg_index)) {
5186 // Ignore whatever value was being returned and return null.
5187 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5188 body += Drop();
5189 body += NullConstant();
5190 } else {
5191 if (marshaller.RequiresBitCast(arg_index)) {
5192 body += BitCast(
5193 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)),
5194 marshaller.RepInDart(arg_index));
5195 }
5196
5197 body += Box(marshaller.RepInDart(arg_index));
5198
5199 if (marshaller.IsBool(arg_index)) {
5200 body += IntToBool();
5201 }
5202 }
5203 return body;
5204}
5205
5206Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
5207 const compiler::ffi::BaseMarshaller& marshaller,
5208 intptr_t arg_index,
5209 LocalVariable* variable) {
5210 ASSERT(!marshaller.IsCompoundCType(arg_index));
5211
5212 Fragment body;
5213 if (marshaller.IsPointerPointer(arg_index)) {
5214 // This can only be Pointer, so it is safe to load the data field.
5215 body += LoadNativeField(Slot::PointerBase_data(),
5216 InnerPointerAccess::kCannotBeInnerPointer);
5217 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5218 // Nothing to do. Unwrap in `FfiCallInstr::EmitNativeCode`.
5219 } else if (marshaller.IsCompoundPointer(arg_index)) {
5220 ASSERT(variable != nullptr);
5221 body += LoadTypedDataBaseFromCompound();
5222 body += LoadLocal(variable); // User-defined struct.
5223 body += LoadOffsetInBytesFromCompound();
5224 body += UnboxTruncate(kUnboxedWord);
5225 } else if (marshaller.IsHandleCType(arg_index)) {
5226 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5227 // pointer to the stack slot as the native handle argument. Therefore the
5228 // only handles that need wrapping are function results.
5229 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5230 LocalVariable* object = MakeTemporary("object");
5231
5232 auto* const arg_reps =
5233 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5234
5235 // Get a reference to the top handle scope.
5236 body += LoadThread();
5237 body += LoadNativeField(Slot::Thread_api_top_scope());
5238 arg_reps->Add(kUntagged);
5239
5240 // Allocate a new handle in the top handle scope.
5241 body +=
5242 CallLeafRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
5243
5244 LocalVariable* handle = MakeTemporary("handle");
5245
5246 // Store the object address into the handle.
5247 body += LoadLocal(handle);
5248 body += LoadLocal(object);
5249 body += StoreNativeField(Slot::LocalHandle_ptr(),
5250 StoreFieldInstr::Kind::kInitializing);
5251
5252 body += DropTempsPreserveTop(1); // Drop object.
5253 } else if (marshaller.IsVoid(arg_index)) {
5254 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5255 // Ignore whatever value was being returned and return nullptr.
5256 body += Drop();
5257 body += UnboxedIntConstant(0, kUnboxedIntPtr);
5258 } else {
5259 if (marshaller.IsBool(arg_index)) {
5260 body += BoolToInt();
5261 }
5262
5263 body += UnboxTruncate(marshaller.RepInDart(arg_index));
5264 }
5265
5266 if (marshaller.RequiresBitCast(arg_index)) {
5267 body += BitCast(
5268 marshaller.RepInDart(arg_index),
5269 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)));
5270 }
5271
5272 return body;
5273}
5274
5275FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
5276 const Function& function) {
5277 switch (function.GetFfiCallbackKind()) {
5278 case FfiCallbackKind::kIsolateLocalStaticCallback:
5279 case FfiCallbackKind::kIsolateLocalClosureCallback:
5280 return BuildGraphOfSyncFfiCallback(function);
5281 case FfiCallbackKind::kAsyncCallback:
5282 return BuildGraphOfAsyncFfiCallback(function);
5283 }
5284 UNREACHABLE();
5285 return nullptr;
5286}
5287
5288Fragment FlowGraphBuilder::FfiNativeLookupAddress(
5289 const dart::Instance& native) {
5290 const auto& native_class = Class::Handle(Z, native.clazz());
5291 ASSERT(String::Handle(Z, native_class.UserVisibleName())
5292 .Equals(Symbols::FfiNative()));
5293 const auto& native_class_fields = Array::Handle(Z, native_class.fields());
5294 ASSERT(native_class_fields.Length() == 4);
5295 const auto& symbol_field =
5296 Field::Handle(Z, Field::RawCast(native_class_fields.At(1)));
5297 ASSERT(!symbol_field.is_static());
5298 const auto& asset_id_field =
5299 Field::Handle(Z, Field::RawCast(native_class_fields.At(2)));
5300 ASSERT(!asset_id_field.is_static());
5301 const auto& symbol =
5302 String::ZoneHandle(Z, String::RawCast(native.GetField(symbol_field)));
5303 const auto& asset_id =
5304 String::ZoneHandle(Z, String::RawCast(native.GetField(asset_id_field)));
5305 const auto& type_args = TypeArguments::Handle(Z, native.GetTypeArguments());
5306 ASSERT(type_args.Length() == 1);
5307 const auto& native_type = AbstractType::ZoneHandle(Z, type_args.TypeAt(0));
5308 intptr_t arg_n;
5309 if (native_type.IsFunctionType()) {
5310 const auto& native_function_type = FunctionType::Cast(native_type);
5311 arg_n = native_function_type.NumParameters() -
5312 native_function_type.num_implicit_parameters();
5313 } else {
5314 // We're looking up the address of a native field.
5315 arg_n = 0;
5316 }
5317 const auto& ffi_resolver =
5318 Function::ZoneHandle(Z, IG->object_store()->ffi_resolver_function());
5319#if !defined(TARGET_ARCH_IA32)
5320 // Access to the pool, use cacheable static call.
5321 Fragment body;
5322 body += Constant(asset_id);
5323 body += Constant(symbol);
5324 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5325 body +=
5326 CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
5327 /*argument_count=*/3,
5328 /*argument_names=*/Array::null_array(),
5329 /*type_args_count=*/0);
5330 return body;
5331#else // !defined(TARGET_ARCH_IA32)
5332 // IA32 only has JIT and no pool. This function will only be compiled if
5333 // immediately run afterwards, so do the lookup here.
5334 char* error = nullptr;
5335#if !defined(DART_PRECOMPILER) || defined(TESTING)
5336 const uintptr_t function_address =
5337 FfiResolveInternal(asset_id, symbol, arg_n, &error);
5338#else
5339 const uintptr_t function_address = 0;
5340 UNREACHABLE(); // JIT runtime should not contain AOT code
5341#endif
5342 if (error == nullptr) {
5343 Fragment body;
5344 body += UnboxedIntConstant(function_address, kUnboxedAddress);
5345 body += ConvertUnboxedToUntagged();
5346 return body;
5347 } else {
5348 free(error);
5349 // Lookup failed, we want to throw an error consistent with AOT, just
5350 // compile into a lookup so that we can throw the error from the same
5351 // error path.
5352 Fragment body;
5353 body += Constant(asset_id);
5354 body += Constant(symbol);
5355 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5356 // Non-cacheable call, this is IA32.
5357 body += StaticCall(TokenPosition::kNoSource, ffi_resolver,
5358 /*argument_count=*/3, ICData::kStatic);
5359 body += UnboxTruncate(kUnboxedAddress);
5360 body += ConvertUnboxedToUntagged();
5361 return body;
5362 }
5363#endif // !defined(TARGET_ARCH_IA32)
5364}
5365
5366Fragment FlowGraphBuilder::FfiNativeFunctionBody(const Function& function) {
5367 ASSERT(function.is_ffi_native());
5368 ASSERT(!IsRecognizedMethodForFlowGraph(function));
5369 ASSERT(optimizing_);
5370
5371 const auto& c_signature =
5372 FunctionType::ZoneHandle(Z, function.FfiCSignature());
5373 auto const& native_instance =
5374 Instance::Handle(function.GetNativeAnnotation());
5375
5376 Fragment body;
5377 body += FfiNativeLookupAddress(native_instance);
5378 body += FfiCallFunctionBody(function, c_signature,
5379 /*first_argument_parameter_offset=*/0);
5380 return body;
5381}
5382
5383Fragment FlowGraphBuilder::FfiCallFunctionBody(
5384 const Function& function,
5385 const FunctionType& c_signature,
5386 intptr_t first_argument_parameter_offset) {
5387 ASSERT(function.is_ffi_native() || function.IsFfiCallClosure());
5388
5389 LocalVariable* address = MakeTemporary("address");
5390
5391 Fragment body;
5392
5393 const char* error = nullptr;
5394 const auto marshaller_ptr = compiler::ffi::CallMarshaller::FromFunction(
5395 Z, function, first_argument_parameter_offset, c_signature, &error);
5396 // AbiSpecific integers can be incomplete causing us to not know the calling
5397 // convention. However, this is caught in asFunction in both JIT/AOT.
5398 RELEASE_ASSERT(error == nullptr);
5399 RELEASE_ASSERT(marshaller_ptr != nullptr);
5400 const auto& marshaller = *marshaller_ptr;
5401
5402 const bool signature_contains_handles = marshaller.ContainsHandles();
5403
5404 // FFI trampolines are accessed via closures, so non-covariant argument types
5405 // and type arguments are either statically checked by the type system or
5406 // dynamically checked via dynamic closure call dispatchers.
5407
5408 // Null check arguments before we go into the try catch, so that we don't
5409 // catch our own null errors.
5410 const intptr_t num_args = marshaller.num_args();
5411 for (intptr_t i = 0; i < num_args; i++) {
5412 if (marshaller.IsHandleCType(i)) {
5413 continue;
5414 }
5415 body += LoadLocal(parsed_function_->ParameterVariable(
5416 first_argument_parameter_offset + i));
5417 // TODO(http://dartbug.com/47486): Support entry without checking for null.
5418 // Check for 'null'.
5419 body += CheckNullOptimized(
5420 String::ZoneHandle(
5421 Z, function.ParameterNameAt(first_argument_parameter_offset + i)),
5422 CheckNullInstr::kArgumentError);
5423 body += StoreLocal(TokenPosition::kNoSource,
5424 parsed_function_->ParameterVariable(
5425 first_argument_parameter_offset + i));
5426 body += Drop();
5427 }
5428
5429 intptr_t try_handler_index = -1;
5430 if (signature_contains_handles) {
5431 // Wrap in Try catch to transition from Native to Generated on a throw from
5432 // the dart_api.
5433 try_handler_index = AllocateTryIndex();
5434 body += TryCatch(try_handler_index);
5435 ++try_depth_;
5436 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5437 // need it.
5438 // We no longer need the scope for passing in Handle arguments, but the
5439 // native function might for instance be relying on this scope for Dart API.
5440
5441 auto* const arg_reps =
5442 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5443
5444 body += LoadThread(); // argument.
5445 arg_reps->Add(kUntagged);
5446
5447 body += CallLeafRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged,
5448 *arg_reps);
5449 }
5450
5451 // Allocate typed data before FfiCall and pass it in to ffi call if needed.
5452 LocalVariable* return_compound_typed_data = nullptr;
5453 if (marshaller.ReturnsCompound()) {
5454 body += IntConstant(marshaller.CompoundReturnSizeInBytes());
5455 body +=
5456 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5457 return_compound_typed_data = MakeTemporary();
5458 }
5459
5460 // Unbox and push the arguments.
5461 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5462 if (marshaller.IsCompoundCType(i)) {
5463 body += FfiCallConvertCompoundArgumentToNative(
5464 parsed_function_->ParameterVariable(first_argument_parameter_offset +
5465 i),
5466 marshaller, i);
5467 } else {
5468 body += LoadLocal(parsed_function_->ParameterVariable(
5469 first_argument_parameter_offset + i));
5470 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5471 // pointer to the stack slot as the native handle argument.
5472 // Therefore we do not need to wrap handles.
5473 if (!marshaller.IsHandleCType(i)) {
5474 body += FfiConvertPrimitiveToNative(
5475 marshaller, i,
5476 parsed_function_->ParameterVariable(
5477 first_argument_parameter_offset + i));
5478 }
5479 }
5480 }
5481
5482 body += LoadLocal(address);
5483
5484 if (marshaller.ReturnsCompound()) {
5485 body += LoadLocal(return_compound_typed_data);
5486 }
5487
5488 body += FfiCall(marshaller, function.FfiIsLeaf());
5489
5490 const intptr_t num_defs = marshaller.NumReturnDefinitions();
5491 ASSERT(num_defs >= 1);
5492 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5493 LocalVariable* def = MakeTemporary("ffi call result");
5494 defs->Add(def);
5495
5496 if (marshaller.ReturnsCompound()) {
5497 // Drop call result, typed data with contents is already on the stack.
5498 body += DropTemporary(&def);
5499 }
5500
5501 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5502 body += FfiCallConvertCompoundReturnToDart(marshaller,
5503 compiler::ffi::kResultIndex);
5504 } else {
5505 body += FfiConvertPrimitiveToDart(marshaller, compiler::ffi::kResultIndex);
5506 }
5507
5508 auto exit_handle_scope = [&]() -> Fragment {
5509 Fragment code;
5510 auto* const arg_reps =
5511 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5512
5513 code += LoadThread(); // argument.
5514 arg_reps->Add(kUntagged);
5515
5516 code += CallLeafRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged,
5517 *arg_reps);
5518 code += Drop();
5519 return code;
5520 };
5521
5522 if (signature_contains_handles) {
5523 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5524 // need it.
5525 body += DropTempsPreserveTop(1); // Drop api_local_scope.
5526 body += exit_handle_scope();
5527 }
5528
5529 body += DropTempsPreserveTop(1); // Drop address.
5530 body += Return(TokenPosition::kNoSource);
5531
5532 if (signature_contains_handles) {
5533 --try_depth_;
5534 ++catch_depth_;
5535 Fragment catch_body =
5536 CatchBlockEntry(Array::empty_array(), try_handler_index,
5537 /*needs_stacktrace=*/true, /*is_synthesized=*/true);
5538
5539 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5540 // need it.
5541 // TODO(41984): If we want to pass in the handle scope, move it out
5542 // of the try catch.
5543 catch_body += exit_handle_scope();
5544
5545 catch_body += LoadLocal(CurrentException());
5546 catch_body += LoadLocal(CurrentStackTrace());
5547 catch_body += RethrowException(TokenPosition::kNoSource, try_handler_index);
5548 --catch_depth_;
5549 }
5550
5551 return body;
5552}
5553
5554Fragment FlowGraphBuilder::LoadNativeArg(
5555 const compiler::ffi::CallbackMarshaller& marshaller,
5556 intptr_t arg_index) {
5557 const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
5558 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5559
5560 Fragment fragment;
5561 for (intptr_t j = 0; j < num_defs; j++) {
5562 const intptr_t def_index = marshaller.DefinitionIndex(j, arg_index);
5563 auto* parameter = new (Z) NativeParameterInstr(marshaller, def_index);
5564 Push(parameter);
5565 fragment <<= parameter;
5566 LocalVariable* def = MakeTemporary();
5567 defs->Add(def);
5568 }
5569
5570 if (marshaller.IsCompoundCType(arg_index)) {
5571 fragment +=
5572 FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, defs);
5573 } else {
5574 fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);
5575 }
5576 return fragment;
5577}
5578
5579FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
5580 const Function& function) {
5581 const char* error = nullptr;
5582 const auto marshaller_ptr =
5583 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5584 // AbiSpecific integers can be incomplete causing us to not know the calling
5585 // convention. However, this is caught in fromFunction in both JIT/AOT.
5586 RELEASE_ASSERT(error == nullptr);
5587 RELEASE_ASSERT(marshaller_ptr != nullptr);
5588 const auto& marshaller = *marshaller_ptr;
5589 const bool is_closure = function.GetFfiCallbackKind() ==
5590 FfiCallbackKind::kIsolateLocalClosureCallback;
5591
5592 graph_entry_ =
5593 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5594
5595 auto* const native_entry =
5596 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5597 CurrentTryIndex(), GetNextDeoptId());
5598
5599 graph_entry_->set_normal_entry(native_entry);
5600
5601 Fragment function_body(native_entry);
5602 function_body += CheckStackOverflowInPrologue(function.token_pos());
5603
5604 // Wrap the entire method in a big try/catch. This is important to ensure that
5605 // the VM does not crash if the callback throws an exception.
5606 const intptr_t try_handler_index = AllocateTryIndex();
5607 Fragment body = TryCatch(try_handler_index);
5608 ++try_depth_;
5609
5610 LocalVariable* closure = nullptr;
5611 if (is_closure) {
5612 // Load and unwrap closure persistent handle.
5613 body += LoadThread();
5614 body +=
5615 LoadUntagged(compiler::target::Thread::unboxed_runtime_arg_offset());
5616 body += LoadNativeField(Slot::PersistentHandle_ptr());
5617 closure = MakeTemporary();
5618 }
5619
5620 // Box and push the arguments.
5621 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5622 body += LoadNativeArg(marshaller, i);
5623 }
5624
5625 if (is_closure) {
5626 // Call the target. The +1 in the argument count is because the closure
5627 // itself is the first argument.
5628 const intptr_t argument_count = marshaller.num_args() + 1;
5629 body += LoadLocal(closure);
5630 if (!FLAG_precompiled_mode) {
5631 // The ClosureCallInstr() takes one explicit input (apart from arguments).
5632 // It uses it to find the target address (in AOT from
5633 // Closure::entry_point, in JIT from Closure::function_::entry_point).
5634 body += LoadNativeField(Slot::Closure_function());
5635 }
5636 body +=
5637 ClosureCall(Function::null_function(), TokenPosition::kNoSource,
5638 /*type_args_len=*/0, argument_count, Array::null_array());
5639 } else {
5640 // Call the target.
5641 //
5642 // TODO(36748): Determine the hot-reload semantics of callbacks and update
5643 // the rebind-rule accordingly.
5644 body += StaticCall(TokenPosition::kNoSource,
5645 Function::ZoneHandle(Z, function.FfiCallbackTarget()),
5646 marshaller.num_args(), Array::empty_array(),
5647 ICData::kNoRebind);
5648 }
5649
5650 if (!marshaller.IsVoid(compiler::ffi::kResultIndex) &&
5651 !marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5652 body += CheckNullOptimized(
5653 String::ZoneHandle(Z, Symbols::New(H.thread(), "return_value")),
5655 }
5656
5657 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5658 body += FfiCallbackConvertCompoundReturnToNative(
5659 marshaller, compiler::ffi::kResultIndex);
5660 } else {
5661 body +=
5662 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5663 }
5664
5665 body += NativeReturn(marshaller);
5666
5667 --try_depth_;
5668 function_body += body;
5669
5670 ++catch_depth_;
5671 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5672 /*needs_stacktrace=*/false,
5673 /*is_synthesized=*/true);
5674
5675 // Return the "exceptional return" value given in 'fromFunction'.
5676 if (marshaller.IsVoid(compiler::ffi::kResultIndex)) {
5677 // The exceptional return is always null -- return nullptr instead.
5678 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5679 catch_body += UnboxedIntConstant(0, kUnboxedIntPtr);
5680 } else if (marshaller.IsPointerPointer(compiler::ffi::kResultIndex)) {
5681 // The exceptional return is always null -- return nullptr instead.
5682 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5683 catch_body += UnboxedIntConstant(0, kUnboxedAddress);
5684 catch_body += ConvertUnboxedToUntagged();
5685 } else if (marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5686 catch_body += UnhandledException();
5687 catch_body +=
5688 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5689 } else if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5690 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5691 // Manufacture empty result.
5692 const intptr_t size =
5693 Utils::RoundUp(marshaller.Location(compiler::ffi::kResultIndex)
5694 .payload_type()
5695 .SizeInBytes(),
5696 compiler::target::kWordSize);
5697 catch_body += IntConstant(size);
5698 catch_body +=
5699 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5700 catch_body += WrapTypedDataBaseInCompound(
5701 AbstractType::Handle(Z, marshaller.CType(compiler::ffi::kResultIndex)));
5702 catch_body += FfiCallbackConvertCompoundReturnToNative(
5703 marshaller, compiler::ffi::kResultIndex);
5704
5705 } else {
5706 catch_body += Constant(
5707 Instance::ZoneHandle(Z, function.FfiCallbackExceptionalReturn()));
5708 catch_body +=
5709 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5710 }
5711
5712 catch_body += NativeReturn(marshaller);
5713 --catch_depth_;
5714
5715 PrologueInfo prologue_info(-1, -1);
5716 return new (Z)
5717 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5718 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5719}
5720
5721FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
5722 const Function& function) {
5723 const char* error = nullptr;
5724 const auto marshaller_ptr =
5725 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5726 // AbiSpecific integers can be incomplete causing us to not know the calling
5727 // convention. However, this is caught in fromFunction in both JIT/AOT.
5728 RELEASE_ASSERT(error == nullptr);
5729 RELEASE_ASSERT(marshaller_ptr != nullptr);
5730 const auto& marshaller = *marshaller_ptr;
5731
5732 // Currently all async FFI callbacks return void. This is enforced by the
5733 // frontend.
5734 ASSERT(marshaller.IsVoid(compiler::ffi::kResultIndex));
5735
5736 graph_entry_ =
5737 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5738
5739 auto* const native_entry =
5740 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5741 CurrentTryIndex(), GetNextDeoptId());
5742
5743 graph_entry_->set_normal_entry(native_entry);
5744
5745 Fragment function_body(native_entry);
5746 function_body += CheckStackOverflowInPrologue(function.token_pos());
5747
5748 // Wrap the entire method in a big try/catch. This is important to ensure that
5749 // the VM does not crash if the callback throws an exception.
5750 const intptr_t try_handler_index = AllocateTryIndex();
5751 Fragment body = TryCatch(try_handler_index);
5752 ++try_depth_;
5753
5754 // Box and push the arguments into an array, to be sent to the target.
5755 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
5756 body += IntConstant(marshaller.num_args());
5757 body += CreateArray();
5758 LocalVariable* array = MakeTemporary();
5759 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5760 body += LoadLocal(array);
5761 body += IntConstant(i);
5762 body += LoadNativeArg(marshaller, i);
5763 body += StoreIndexed(kArrayCid);
5764 }
5765
5766 // Send the arg array to the target. The arg array is still on the stack.
5767 body += Call1ArgStub(TokenPosition::kNoSource,
5768 Call1ArgStubInstr::StubId::kFfiAsyncCallbackSend);
5769
5770 body += FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5771 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5772 body += NativeReturn(marshaller);
5773
5774 --try_depth_;
5775 function_body += body;
5776
5777 ++catch_depth_;
5778 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5779 /*needs_stacktrace=*/false,
5780 /*is_synthesized=*/true);
5781
5782 // This catch indicates there's been some sort of error, but async callbacks
5783 // are fire-and-forget, and we don't guarantee delivery.
5784 catch_body += NullConstant();
5785 catch_body +=
5786 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5787 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5788 catch_body += NativeReturn(marshaller);
5789 --catch_depth_;
5790
5791 PrologueInfo prologue_info(-1, -1);
5792 return new (Z)
5793 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5794 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5795}
5796
5797void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
5798 try_catch_block_ = try_catch_block;
5799 SetCurrentTryIndex(try_catch_block == nullptr ? kInvalidTryIndex
5800 : try_catch_block->try_index());
5801}
5802
5803const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
5804 if (prepend_type_arguments_.IsNull()) {
5805 const auto& dart_internal = Library::Handle(Z, Library::InternalLibrary());
5806 prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
5807 Symbols::PrependTypeArguments());
5808 ASSERT(!prepend_type_arguments_.IsNull());
5809 }
5810 return prepend_type_arguments_;
5811}
5812
5813Fragment FlowGraphBuilder::BuildIntegerHashCode(bool smi) {
5814 Fragment body;
5815 Value* unboxed_value = Pop();
5816 HashIntegerOpInstr* hash =
5817 new HashIntegerOpInstr(unboxed_value, smi, DeoptId::kNone);
5818 Push(hash);
5819 body <<= hash;
5820 return body;
5821}
5822
5823Fragment FlowGraphBuilder::BuildDoubleHashCode() {
5824 Fragment body;
5825 Value* double_value = Pop();
5826 HashDoubleOpInstr* hash = new HashDoubleOpInstr(double_value, DeoptId::kNone);
5827 Push(hash);
5828 body <<= hash;
5829 body += Box(kUnboxedInt64);
5830 return body;
5831}
5832
5833 SwitchHelper::SwitchHelper(Zone* zone,
5834 TokenPosition position,
5835 bool is_exhaustive,
5836 const AbstractType& expression_type,
5837 SwitchBlock* switch_block,
5838 intptr_t case_count)
5839 : zone_(zone),
5840 position_(position),
5841 is_exhaustive_(is_exhaustive),
5842 expression_type_(expression_type),
5843 switch_block_(switch_block),
5844 case_count_(case_count),
5845 case_bodies_(case_count),
5846 case_expression_counts_(case_count),
5847 expressions_(case_count),
5848 sorted_expressions_(case_count) {
5849 case_expression_counts_.FillWith(0, 0, case_count);
5850
5853 is_optimizable_ = true;
5854 } else if (expression_type.HasTypeClass() &&
5855 Class::Handle(zone_, expression_type.type_class())
5856 .is_enum_class()) {
5857 is_optimizable_ = true;
5858 is_enum_switch_ = true;
5859 }
5860 }
5861}
5862
5863 int64_t SwitchHelper::ExpressionRange() const {
5864 const int64_t min = expression_min().AsInt64Value();
5865 const int64_t max = expression_max().AsInt64Value();
5866 ASSERT(min <= max);
5867 const uint64_t diff = static_cast<uint64_t>(max) - static_cast<uint64_t>(min);
5868 // Saturate to avoid overflow.
5869 if (diff > static_cast<uint64_t>(kMaxInt64 - 1)) {
5870 return kMaxInt64;
5871 }
5872 return static_cast<int64_t>(diff + 1);
5873}
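//
// Worked example: expressions spanning min = -2 and max = 5 give a range
// of 5 - (-2) + 1 = 8 jump-table slots. The unsigned difference and the
// saturation to kMaxInt64 only matter for extreme ranges such as
// kMinInt64..kMaxInt64, whose true size does not fit in an int64_t.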
5874
5875 bool SwitchHelper::RequiresLowerBoundCheck() const {
5876 if (is_enum_switch()) {
5877 if (expression_min().IsZero()) {
5878 // Enum indexes are always positive.
5879 return false;
5880 }
5881 }
5882 return true;
5883}
5884
5885 bool SwitchHelper::RequiresUpperBoundCheck() const {
5886 if (is_enum_switch()) {
5887 return has_default() || !is_exhaustive();
5888 }
5889 return true;
5890}
5891
5892 SwitchDispatch SwitchHelper::SelectDispatchStrategy() {
5893 // For small to medium-sized switches, binary search is faster than a
5894 // jump table.
5895 // Please update runtime/tests/vm/dart/optimized_switch_test.dart
5896 // when changing this constant.
5897 const intptr_t kJumpTableMinExpressions = 16;
5898 // This limit comes from IndirectGotoInstr.
5899 // Realistically, the current limit should never be hit by any code.
5900 const intptr_t kJumpTableMaxSize = kMaxInt32;
5901 // Sometimes the switch expressions don't cover a contiguous range.
5902 // If the ratio of holes to expressions is too great we fall back to a
5903 // binary search to avoid code size explosion.
5904 const double kJumpTableMaxHolesRatio = 1.0;
5905
5906 if (!is_optimizable() || expressions().is_empty()) {
5907 // The switch is not optimizable, so we can only use linear scan.
5908 return kSwitchDispatchLinearScan;
5909 }
5910
5911 if (!CompilerState::Current().is_aot()) {
5912 // JIT mode supports hot-reload, which currently prevents us from
5913 // enabling optimized switches.
5914 return kSwitchDispatchLinearScan;
5915 }
5916
5917 if (FLAG_force_switch_dispatch_type == kSwitchDispatchLinearScan) {
5918 return kSwitchDispatchLinearScan;
5919 }
5920
5921 PrepareForOptimizedSwitch();
5922
5923 if (!is_optimizable()) {
5924 // While preparing for an optimized switch we might have discovered that
5925 // the switch is not optimizable after all.
5926 return kSwitchDispatchLinearScan;
5927 }
5928
5929 if (FLAG_force_switch_dispatch_type == kSwitchDispatchBinarySearch) {
5930 return kSwitchDispatchBinarySearch;
5931 }
5932
5933 const int64_t range = ExpressionRange();
5934 if (range > kJumpTableMaxSize) {
5935 return kSwitchDispatchBinarySearch;
5936 }
5937
5938 const intptr_t num_expressions = expressions().length();
5939 ASSERT(num_expressions <= range);
5940
5941 const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
5942 const int64_t holes = range - num_expressions;
5943
5944 if (FLAG_force_switch_dispatch_type != kSwitchDispatchJumpTable) {
5945 if (num_expressions < kJumpTableMinExpressions) {
5946 return kSwitchDispatchBinarySearch;
5947 }
5948
5949 if (holes > max_holes) {
5950 return kSwitchDispatchBinarySearch;
5951 }
5952 }
5953
5954 // After this point we will use a jump table.
5955
5956 // In the general case, bounds checks are required before a jump table
5957 // to handle all possible integer values.
5958 // For enums, the set of possible index values is known and much smaller
5959 // than the set of all possible integer values. A jump table that covers
5960 // either or both bounds of the range of index values requires only one or
5961 // no bounds checks.
5962 // If the expressions of an enum switch don't cover the full range of
5963 // values we can try to extend the jump table to cover the full range, but
5964 // not beyond kJumpTableMaxHolesRatio.
5965 // The count of enum values is not available when the flow graph is
5966 // constructed. The lower bound is always 0 so eliminating the lower
5967 // bound check is still possible by extending expression_min to 0.
5968 //
5969 // In the case of an integer switch we try to extend expression_min to 0
5970 // for a different reason.
5971 // If the range starts at zero it directly maps to the jump table
5972 // and we don't need to adjust the switch variable before the
5973 // jump table.
5974 if (expression_min().AsInt64Value() > 0) {
5975 const intptr_t holes_budget = Utils::Minimum(
5976 // Holes still available.
5977 max_holes - holes,
5978 // Entries left in the jump table.
5979 kJumpTableMaxSize - range);
5980
5981 const int64_t required_holes = expression_min().AsInt64Value();
5982 if (required_holes <= holes_budget) {
5983 expression_min_ = &Object::smi_zero();
5984 }
5985 }
5986
5987 return kSwitchDispatchJumpTable;
5988}
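// Worked example (illustrative, not part of the original source), using
// the constants above: 20 case values {0, 2, 4, ..., 38} give range = 39,
// holes = 39 - 20 = 19, and max_holes = 20 * 1.0 = 20; num_expressions
// meets kJumpTableMinExpressions and holes <= max_holes, so a jump table
// is selected. By contrast, 20 case values {0, 100, ..., 1900} give
// holes = 1881 > max_holes, falling back to binary search. For the 16
// values {5, 6, ..., 20}, required_holes = 5 fits the holes budget, so
// expression_min_ is extended to 0 and no bias adjustment of the switch
// variable is needed before indexing the jump table.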
5989
5990void SwitchHelper::PrepareForOptimizedSwitch() {
5991 // Find the min and max of integer representations of expressions.
5992 // We also populate SwitchExpressions.integer for later use.
5993 const Field* enum_index_field = nullptr;
5994 for (intptr_t i = 0; i < expressions_.length(); ++i) {
5995 SwitchExpression& expression = expressions_[i];
5996 sorted_expressions_.Add(&expression);
5997
5998 const Instance& value = expression.value();
5999 const Integer* integer = nullptr;
6000 if (is_enum_switch()) {
6001 if (enum_index_field == nullptr) {
6002 enum_index_field =
6003 &Field::Handle(zone_, IG->object_store()->enum_index_field());
6004 }
6005 integer = &Integer::ZoneHandle(
6006 zone_, Integer::RawCast(value.GetField(*enum_index_field)));
6007 } else {
6008 integer = &Integer::Cast(value);
6009 }
6010 expression.set_integer(*integer);
6011 if (i == 0) {
6012 expression_min_ = integer;
6013 expression_max_ = integer;
6014 } else {
6015 if (expression_min_->CompareWith(*integer) > 0) {
6016 expression_min_ = integer;
6017 }
6018 if (expression_max_->CompareWith(*integer) < 0) {
6019 expression_max_ = integer;
6020 }
6021 }
6022 }
6023
6024 // Sort expressions by their integer value.
6025 sorted_expressions_.Sort(
6026 [](SwitchExpression* const* a, SwitchExpression* const* b) {
6027 return (*a)->integer().CompareWith((*b)->integer());
6028 });
6029
6030 // Check that there are no duplicate case expressions.
6031 // Duplicate expressions are allowed in switch statements, but
6032 // optimized switches don't implement them.
6033 for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
6034 const SwitchExpression& a = *sorted_expressions_.At(i);
6035 const SwitchExpression& b = *sorted_expressions_.At(i + 1);
6036 if (a.integer().Equals(b.integer())) {
6037 is_optimizable_ = false;
6038 break;
6039 }
6040 }
6041}
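// Illustrative trace (not part of the original source): for case values
// {3, 1, 3}, sorting yields {1, 3, 3}; the adjacent-pair scan then sees
// 3 == 3 and clears is_optimizable_, so a switch with duplicate case
// expressions falls back to linear-scan dispatch. Sorting first makes the
// duplicate check O(n log n) instead of a quadratic all-pairs comparison.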
6042
6043void SwitchHelper::AddExpression(intptr_t case_index,
6044 TokenPosition position,
6045 const Instance& value) {
6046 case_expression_counts_[case_index]++;
6047
6048 expressions_.Add(SwitchExpression(case_index, position, value));
6049
6050 if (is_optimizable_) {
6051 // Check the type of the case expression for use in an optimized switch.
6052 if (!value.IsInstanceOf(expression_type_, Object::null_type_arguments(),
6053 Object::null_type_arguments())) {
6054 is_optimizable_ = false;
6055 }
6056 }
6057}
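// Note (an interpretive gloss, not part of the original source): a single
// case constant that is not an instance of the scrutinee's static type
// (expression_type_) demotes the entire switch to linear-scan dispatch,
// since the optimized strategies assume every case value has the integer
// or enum index representation computed in PrepareForOptimizedSwitch().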
6058
6059} // namespace kernel
6060
6061} // namespace dart