48 "Print huge methods (less optimized)");
51 force_switch_dispatch_type,
53 "Force switch statements to use a particular dispatch type: "
54 "-1=auto, 0=linear scan, 1=binary search, 2=jump table");
// Short-hand aliases used throughout this translation unit: H is the kernel
// translation helper member, T the type translator member, and I / IG resolve
// to the current Isolate and IsolateGroup at each use site.
// NOTE(review): the numeric prefixes fused onto these lines ("59", "60", ...)
// are extraction artifacts from the original file's line numbers, not part of
// the macros — restore against the pristine source.
59#define H (translation_helper_)
60#define T (type_translator_)
61#define I Isolate::Current()
62#define IG IsolateGroup::Current()
71 intptr_t first_block_id,
72 bool inlining_unchecked_entry)
78 inlining_unchecked_entry),
79 translation_helper_(
Thread::Current()),
80 thread_(translation_helper_.thread()),
81 zone_(translation_helper_.zone()),
82 parsed_function_(parsed_function),
83 optimizing_(optimizing),
84 ic_data_array_(*ic_data_array),
89 block_expression_depth_(0),
90 graph_entry_(nullptr),
92 breakable_block_(nullptr),
93 switch_block_(nullptr),
94 try_catch_block_(nullptr),
95 try_finally_block_(nullptr),
96 catch_block_(nullptr),
97 prepend_type_arguments_(
Function::ZoneHandle(zone_)) {
100 H.InitFromKernelProgramInfo(
info);
// Builds the IL fragment that enters the lexical scope declared at
// `kernel_offset`: pushes a new Context (PushContext) and, when the caller
// passes a non-null out-parameter, reports the scope back through
// *context_scope. NOTE(review): interior lines (107-110, 113) are missing
// from this extract and the function's closing brace is not visible — the
// code below is a fragment; verify against the full source before editing.
105Fragment FlowGraphBuilder::EnterScope(
106    intptr_t kernel_offset,
111  instructions += PushContext(scope);
112  instructions +=
Drop();
114  if (context_scope !=
nullptr) {
115    *context_scope = scope;
// Builds the IL fragment that leaves the scope declared at `kernel_offset`.
// It looks the scope up in scopes_ and, only if that scope allocated a
// Context (num_context_variables() > 0), emits PopContext to restore the
// previous context chain. NOTE(review): the tail of this function (after the
// PopContext line) is missing from this extract.
120Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
121  Fragment instructions;
122  const intptr_t context_size =
123      scopes_->
scopes.Lookup(kernel_offset)->num_context_variables();
124  if (context_size > 0) {
125    instructions += PopContext();
// Emits IL that rewinds the current context chain to the given `depth`.
// The ASSERT pins the precondition: the target depth is non-negative and no
// deeper than the depth being tracked in context_depth_. The visible body
// stores some value into a local (StoreLocal, token-position-less) and drops
// the stored value from the expression stack. NOTE(review): lines 133-134 and
// 136 (the value being stored and the surrounding control flow — presumably a
// loop or guard on context_depth_) are missing from this extract; confirm
// against the full source.
130Fragment FlowGraphBuilder::AdjustContextTo(
int depth) {
131  ASSERT(depth <= context_depth_ && depth >= 0);
132  Fragment instructions;
135    instructions +=
StoreLocal(TokenPosition::kNoSource,
137    instructions +=
Drop();
// Emits IL that allocates and installs a new Context for `scope`. Only
// scopes that actually capture variables get a context — the ASSERT enforces
// num_context_variables() > 0, matching the guard callers like EnterScope /
// ExitScope apply. The visible body loads the current context and stores the
// new one into a local. NOTE(review): lines 145-146 and 148-150 (allocation
// of the new context and the parent-link store) are missing from this
// extract; this fragment is not self-contained.
143Fragment FlowGraphBuilder::PushContext(
const LocalScope* scope) {
144  ASSERT(scope->num_context_variables() > 0);
147  instructions += LoadLocal(context);
151  instructions +=
StoreLocal(TokenPosition::kNoSource,
157Fragment FlowGraphBuilder::PopContext() {
161Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
163 Fragment instructions;
168 while (
function.IsClosureFunction()) {
177 instructions += LoadLocal(parsed_function_->
receiver_var());
189Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
190 Fragment instructions;
204Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
205 const TypeArguments& type_arguments) {
206 Fragment instructions;
208 auto const mode = type_arguments.GetInstantiationMode(
214 instructions +=
Constant(type_arguments);
226 instructions += LoadInstantiatorTypeArguments();
229 instructions += LoadFunctionTypeArguments();
235 instructions += LoadInstantiatorTypeArguments();
239 if (!type_arguments.IsInstantiated(
kFunctions)) {
240 instructions += LoadFunctionTypeArguments();
250Fragment FlowGraphBuilder::CatchBlockEntry(
const Array& handler_types,
251 intptr_t handler_index,
252 bool needs_stacktrace,
253 bool is_synthesized) {
254 LocalVariable* exception_var = CurrentException();
255 LocalVariable* stacktrace_var = CurrentStackTrace();
256 LocalVariable* raw_exception_var = CurrentRawException();
257 LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();
259 CatchBlockEntryInstr* entry =
new (
Z) CatchBlockEntryInstr(
263 stacktrace_var, raw_exception_var, raw_stacktrace_var);
266 Fragment instructions(entry);
271 const bool should_restore_closure_context =
274 if (should_restore_closure_context) {
278 ASSERT(!closure_parameter->is_captured());
279 instructions += LoadLocal(closure_parameter);
281 instructions +=
StoreLocal(TokenPosition::kNoSource, context_variable);
282 instructions +=
Drop();
285 if (exception_var->is_captured()) {
286 instructions += LoadLocal(context_variable);
287 instructions += LoadLocal(raw_exception_var);
291 if (stacktrace_var->is_captured()) {
292 instructions += LoadLocal(context_variable);
293 instructions += LoadLocal(raw_stacktrace_var);
304 ASSERT(!CurrentCatchContext()->is_captured() ||
305 CurrentCatchContext()->owner()->context_level() == 0);
307 instructions += LoadLocal(CurrentCatchContext());
308 instructions +=
StoreLocal(TokenPosition::kNoSource,
310 instructions +=
Drop();
316Fragment FlowGraphBuilder::TryCatch(
int try_handler_index) {
325 body +=
StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
328 return Fragment(body.entry, entry);
331Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
332 TokenPosition position) {
337Fragment FlowGraphBuilder::CloneContext(
338 const ZoneGrowableArray<const Slot*>& context_slots) {
341 Fragment instructions = LoadLocal(context_variable);
343 CloneContextInstr* clone_instruction =
new (
Z) CloneContextInstr(
345 instructions <<= clone_instruction;
346 Push(clone_instruction);
348 instructions +=
StoreLocal(TokenPosition::kNoSource, context_variable);
349 instructions +=
Drop();
353Fragment FlowGraphBuilder::InstanceCall(
354 TokenPosition position,
357 intptr_t type_args_len,
359 const Array& argument_names,
360 intptr_t checked_argument_count,
361 const Function& interface_target,
362 const Function& tearoff_interface_target,
363 const InferredTypeMetadata* result_type,
364 bool use_unchecked_entry,
365 const CallSiteAttributesMetadata* call_site_attrs,
366 bool receiver_is_not_smi,
367 bool is_call_on_this) {
368 const intptr_t total_count =
argument_count + (type_args_len > 0 ? 1 : 0);
370 InstanceCallInstr*
call =
new (
Z) InstanceCallInstr(
371 InstructionSource(position),
name, kind, std::move(arguments),
372 type_args_len, argument_names, checked_argument_count, ic_data_array_,
374 if ((result_type !=
nullptr) && !result_type->IsTrivial()) {
375 call->SetResultType(
Z, result_type->ToCompileType(
Z));
377 if (use_unchecked_entry) {
378 call->set_entry_kind(Code::EntryKind::kUnchecked);
380 if (is_call_on_this) {
381 call->mark_as_call_on_this();
383 if (call_site_attrs !=
nullptr && call_site_attrs->receiver_type !=
nullptr &&
384 call_site_attrs->receiver_type->IsInstantiated()) {
385 call->set_receivers_static_type(call_site_attrs->receiver_type);
386 }
else if (!interface_target.IsNull()) {
388 const AbstractType&
type =
390 call->set_receivers_static_type(&
type);
392 call->set_receiver_is_not_smi(receiver_is_not_smi);
394 if (result_type !=
nullptr && result_type->IsConstant()) {
395 Fragment instructions(call);
396 instructions +=
Drop();
397 instructions +=
Constant(result_type->constant_value);
400 return Fragment(call);
403Fragment FlowGraphBuilder::FfiCall(
404 const compiler::ffi::CallMarshaller& marshaller,
408 const intptr_t num_arguments =
411 FfiCallInstr*
const call =
new (
Z)
412 FfiCallInstr(
GetNextDeoptId(), marshaller, is_leaf, std::move(arguments));
419Fragment FlowGraphBuilder::CallLeafRuntimeEntry(
420 const RuntimeEntry& entry,
422 const ZoneGrowableArray<Representation>& argument_representations) {
425 body += LoadThread();
426 body +=
LoadUntagged(compiler::target::Thread::OffsetFromThread(&entry));
428 const intptr_t num_arguments = argument_representations.length() + 1;
431 Z, return_representation, argument_representations, std::move(arguments));
438Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
439 int catch_try_index) {
440 Fragment instructions;
443 instructions += Fragment(
new (
Z) ReThrowInstr(
444 InstructionSource(position), catch_try_index,
// Emits IL that reads `variable` onto the expression stack. Two cases are
// visible: (1) when the variable is the receiver of a method with a `this`
// parameter, it is redirected to the raw (unboxed/untouched) parameter slot
// RawParameterVariable(0) — the ASSERT documents that receiver_var() and
// ParameterVariable(0) denote the same variable; (2) when the variable is
// captured, the load goes through its owning scope's context: first load the
// context at the variable's context level, then (in lines elided from this
// extract) load the field out of it. NOTE(review): lines 455-456, 462, and
// 466 onward — including the non-captured fallback path — are missing here.
454Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
457  const ParsedFunction* pf = parsed_function_;
458  if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
459      variable == pf->receiver_var()) {
460    ASSERT(variable == pf->ParameterVariable(0));
461    variable = pf->RawParameterVariable(0);
463  if (variable->is_captured()) {
464    Fragment instructions;
465    instructions +=
LoadContextAt(variable->owner()->context_level());
// Allocates (zone-allocated via Z) an IndirectGotoInstr with `target_count`
// successor slots. NOTE(review): line 475, which presumably defines `index`
// (the popped jump-index value passed as the second constructor argument),
// is missing from this extract — confirm before relying on this fragment.
474IndirectGotoInstr* FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
476  return new (
Z) IndirectGotoInstr(target_count, index);
479Fragment FlowGraphBuilder::ThrowLateInitializationError(
480 TokenPosition position,
481 const char* throw_method_name,
482 const String&
name) {
488 const auto&
error = klass.EnsureIsFinalized(thread_);
490 const Function& throw_new =
492 H.DartSymbolObfuscate(throw_method_name)));
493 ASSERT(!throw_new.IsNull());
495 Fragment instructions;
502 instructions +=
Drop();
507Fragment FlowGraphBuilder::StoreLateField(
const Field& field,
509 LocalVariable* setter_value) {
510 Fragment instructions;
511 TargetEntryInstr* is_uninitialized;
512 TargetEntryInstr* is_initialized;
513 const TokenPosition position = field.token_pos();
514 const bool is_static = field.is_static();
515 const bool is_final = field.is_final();
522 instructions += LoadLocal(
instance);
525 instructions +=
Constant(Object::sentinel());
531 Fragment initialize(is_uninitialized);
532 initialize +=
Goto(join);
537 Fragment already_initialized(is_initialized);
538 already_initialized += ThrowLateInitializationError(
539 position,
"_throwFieldAlreadyInitialized",
541 already_initialized +=
Goto(join);
544 instructions = Fragment(instructions.entry, join);
548 instructions += LoadLocal(
instance);
550 instructions += LoadLocal(setter_value);
560Fragment FlowGraphBuilder::NativeCall(
const String&
name,
564 const intptr_t num_args =
567 Fragment instructions;
571 NativeCallInstr*
call =
new (
Z) NativeCallInstr(
573 InstructionSource(
function.end_token_pos()), std::move(arguments));
575 instructions <<=
call;
579Fragment FlowGraphBuilder::Return(TokenPosition position,
580 bool omit_result_type_check) {
581 Fragment instructions;
586 if (!omit_result_type_check &&
function.is_old_native()) {
587 const AbstractType& return_type =
589 instructions += CheckAssignable(return_type, Symbols::FunctionResult());
592 if (NeedsDebugStepCheck(
function, position)) {
601Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
// Attaches inferred result-type information to a StaticCallInstr. Two
// visible steps: (1) let the call initialize its own result type from the
// target (InitResultType); when that succeeds, the (partially elided) ASSERT
// cross-checks that any metadata cid agrees with call->result_cid();
// (2) otherwise, when non-trivial InferredTypeMetadata is available, install
// it via SetResultType(ToCompileType) — the same pattern InstanceCall uses
// for its result_type parameter. NOTE(review): lines 611-612 (the remaining
// parameters), 615, 617-618, and the closing braces are missing from this
// extract; the `return`/else structure between the two steps cannot be
// confirmed from here.
609void FlowGraphBuilder::SetResultTypeForStaticCall(
610    StaticCallInstr* call,
613    const InferredTypeMetadata* result_type) {
614  if (
call->InitResultType(
Z)) {
616           (result_type->cid ==
call->result_cid()));
619  if ((result_type !=
nullptr) && !result_type->IsTrivial()) {
620    call->SetResultType(
Z, result_type->ToCompileType(
Z));
624Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
627 const Array& argument_names,
629 const InferredTypeMetadata* result_type,
630 intptr_t type_args_count,
631 bool use_unchecked_entry) {
632 const intptr_t total_count =
argument_count + (type_args_count > 0 ? 1 : 0);
634 StaticCallInstr*
call =
new (
Z) StaticCallInstr(
635 InstructionSource(position),
target, type_args_count, argument_names,
636 std::move(arguments), ic_data_array_,
GetNextDeoptId(), rebind_rule);
638 if (use_unchecked_entry) {
639 call->set_entry_kind(Code::EntryKind::kUnchecked);
642 if (result_type !=
nullptr && result_type->IsConstant()) {
643 Fragment instructions(call);
644 instructions +=
Drop();
645 instructions +=
Constant(result_type->constant_value);
648 return Fragment(call);
651Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
655 const Array& argument_names,
656 intptr_t type_args_count) {
657 const intptr_t total_count =
argument_count + (type_args_count > 0 ? 1 : 0);
659 CachableIdempotentCallInstr*
call =
new (
Z) CachableIdempotentCallInstr(
660 InstructionSource(position), representation,
target, type_args_count,
663 return Fragment(call);
666Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
667 Fragment instructions;
668 instructions += StaticCall(
674Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
675 Fragment instructions;
682Fragment FlowGraphBuilder::ThrowTypeError() {
686 GrowableHandlePtrArray<const String> pieces(
Z, 3);
687 pieces.Add(Symbols::TypeError());
689 pieces.Add(
H.DartSymbolObfuscate(
"_create"));
692 Z, klass.LookupConstructorAllowPrivate(
694 ASSERT(!constructor.IsNull());
696 const String& url =
H.DartString(
700 Fragment instructions;
703 instructions +=
AllocateObject(TokenPosition::kNoSource, klass, 0);
707 instructions += LoadLocal(
instance);
711 instructions +=
Constant(
H.DartSymbolPlain(
"Malformed type."));
713 instructions += StaticCall(TokenPosition::kNoSource, constructor,
715 instructions +=
Drop();
723Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
725 bool incompatible_arguments,
726 bool receiver_pushed) {
730 if (
target.IsImplicitGetterFunction() ||
target.IsGetterFunction() ||
731 target.IsRecordFieldGetter()) {
733 }
else if (
target.IsImplicitSetterFunction() ||
target.IsSetterFunction()) {
737 if (owner.IsTopLevel()) {
738 if (incompatible_arguments) {
739 receiver =
target.UserVisibleSignature();
743 receiver = owner.RareType();
744 if (
target.kind() == UntaggedFunction::kConstructor) {
746 }
else if (
target.IsRecordFieldGetter()) {
753 Fragment instructions;
754 if (!receiver_pushed) {
763Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
764 const String& selector,
767 bool receiver_pushed) {
771 const auto&
error = klass.EnsureIsFinalized(
H.thread());
774 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNew()));
775 ASSERT(!throw_function.IsNull());
777 Fragment instructions;
778 if (!receiver_pushed) {
787 instructions += StaticCall(position, throw_function, 7,
792LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
793 LocalVariable*
local = scopes_->
locals.Lookup(kernel_offset);
808 !
function.IsDynamicInvocationForwarder()) {
810 FATAL(
"Recognized method %s is not marked with the vm:recognized pragma.",
813 FATAL(
"Non-recognized method %s is marked with the vm:recognized pragma.",
820 intptr_t kernel_data_program_offset =
function.KernelLibraryOffset();
823 this, kernel_data, kernel_data_program_offset);
830 FLAG_huge_method_cutoff_in_ast_nodes) {
831 if (FLAG_print_huge_methods) {
833 "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
835 "disabled, and the compiler might run out of memory. "
836 "Consider refactoring this code into smaller components.\n",
837 function.QualifiedUserVisibleNameCString(),
843 result->mark_huge_method();
860 for (intptr_t i = 0; i <
function.NumParameters(); ++i) {
866 Return(TokenPosition::kNoSource,
false);
875 case kUnboxedInt32x4:
876 case kUnboxedFloat32x4:
877 case kUnboxedFloat64x2:
890 return state.TypedListGetFloat64();
891 case kUnboxedInt32x4:
892 return state.TypedListGetInt32x4();
893 case kUnboxedFloat32x4:
894 return state.TypedListGetFloat32x4();
895 case kUnboxedFloat64x2:
896 return state.TypedListGetFloat64x2();
899 return Object::null_function();
903#define LOAD_NATIVE_FIELD(V) \
904 V(ByteDataViewLength, TypedDataBase_length) \
905 V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
906 V(ByteDataViewTypedData, TypedDataView_typed_data) \
907 V(Finalizer_getCallback, Finalizer_callback) \
908 V(FinalizerBase_getAllEntries, FinalizerBase_all_entries) \
909 V(FinalizerBase_getDetachments, FinalizerBase_detachments) \
910 V(FinalizerEntry_getDetach, FinalizerEntry_detach) \
911 V(FinalizerEntry_getNext, FinalizerEntry_next) \
912 V(FinalizerEntry_getToken, FinalizerEntry_token) \
913 V(FinalizerEntry_getValue, FinalizerEntry_value) \
914 V(NativeFinalizer_getCallback, NativeFinalizer_callback) \
915 V(GrowableArrayLength, GrowableObjectArray_length) \
916 V(ReceivePort_getSendPort, ReceivePort_send_port) \
917 V(ReceivePort_getHandler, ReceivePort_handler) \
918 V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data) \
919 V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index) \
920 V(LinkedHashBase_getData, LinkedHashBase_data) \
921 V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys) \
922 V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask) \
923 V(LinkedHashBase_getIndex, LinkedHashBase_index) \
924 V(LinkedHashBase_getUsedData, LinkedHashBase_used_data) \
925 V(ObjectArrayLength, Array_length) \
926 V(Record_shape, Record_shape) \
927 V(SuspendState_getFunctionData, SuspendState_function_data) \
928 V(SuspendState_getThenCallback, SuspendState_then_callback) \
929 V(SuspendState_getErrorCallback, SuspendState_error_callback) \
930 V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
931 V(TypedDataViewTypedData, TypedDataView_typed_data) \
932 V(TypedListBaseLength, TypedDataBase_length) \
933 V(WeakProperty_getKey, WeakProperty_key) \
934 V(WeakProperty_getValue, WeakProperty_value) \
935 V(WeakReference_getTarget, WeakReference_target)
937#define STORE_NATIVE_FIELD(V) \
938 V(Finalizer_setCallback, Finalizer_callback) \
939 V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
940 V(FinalizerBase_setDetachments, FinalizerBase_detachments) \
941 V(FinalizerEntry_setToken, FinalizerEntry_token) \
942 V(NativeFinalizer_setCallback, NativeFinalizer_callback) \
943 V(ReceivePort_setHandler, ReceivePort_handler) \
944 V(LinkedHashBase_setData, LinkedHashBase_data) \
945 V(LinkedHashBase_setIndex, LinkedHashBase_index) \
946 V(SuspendState_setFunctionData, SuspendState_function_data) \
947 V(SuspendState_setThenCallback, SuspendState_then_callback) \
948 V(SuspendState_setErrorCallback, SuspendState_error_callback) \
949 V(WeakProperty_setKey, WeakProperty_key) \
950 V(WeakProperty_setValue, WeakProperty_value) \
951 V(WeakReference_setTarget, WeakReference_target)
953#define STORE_NATIVE_FIELD_NO_BARRIER(V) \
954 V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys) \
955 V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask) \
956 V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)
963#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
964 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
966 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
968 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
971#undef TYPED_DATA_GET_INDEXED_CASES
972 case MethodRecognizer::kObjectArrayGetIndexed:
973 case MethodRecognizer::kGrowableArrayGetIndexed:
974 case MethodRecognizer::kRecord_fieldAt:
975 case MethodRecognizer::kRecord_fieldNames:
976 case MethodRecognizer::kRecord_numFields:
977 case MethodRecognizer::kSuspendState_clone:
978 case MethodRecognizer::kSuspendState_resume:
979 case MethodRecognizer::kTypedList_GetInt8:
980 case MethodRecognizer::kTypedList_SetInt8:
981 case MethodRecognizer::kTypedList_GetUint8:
982 case MethodRecognizer::kTypedList_SetUint8:
983 case MethodRecognizer::kTypedList_GetInt16:
984 case MethodRecognizer::kTypedList_SetInt16:
985 case MethodRecognizer::kTypedList_GetUint16:
986 case MethodRecognizer::kTypedList_SetUint16:
987 case MethodRecognizer::kTypedList_GetInt32:
988 case MethodRecognizer::kTypedList_SetInt32:
989 case MethodRecognizer::kTypedList_GetUint32:
990 case MethodRecognizer::kTypedList_SetUint32:
991 case MethodRecognizer::kTypedList_GetInt64:
992 case MethodRecognizer::kTypedList_SetInt64:
993 case MethodRecognizer::kTypedList_GetUint64:
994 case MethodRecognizer::kTypedList_SetUint64:
995 case MethodRecognizer::kTypedList_GetFloat32:
996 case MethodRecognizer::kTypedList_SetFloat32:
997 case MethodRecognizer::kTypedList_GetFloat64:
998 case MethodRecognizer::kTypedList_SetFloat64:
999 case MethodRecognizer::kTypedList_GetInt32x4:
1000 case MethodRecognizer::kTypedList_SetInt32x4:
1001 case MethodRecognizer::kTypedList_GetFloat32x4:
1002 case MethodRecognizer::kTypedList_SetFloat32x4:
1003 case MethodRecognizer::kTypedList_GetFloat64x2:
1004 case MethodRecognizer::kTypedList_SetFloat64x2:
1005 case MethodRecognizer::kTypedData_memMove1:
1006 case MethodRecognizer::kTypedData_memMove2:
1007 case MethodRecognizer::kTypedData_memMove4:
1008 case MethodRecognizer::kTypedData_memMove8:
1009 case MethodRecognizer::kTypedData_memMove16:
1010 case MethodRecognizer::kTypedData_ByteDataView_factory:
1011 case MethodRecognizer::kTypedData_Int8ArrayView_factory:
1012 case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
1013 case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
1014 case MethodRecognizer::kTypedData_Int16ArrayView_factory:
1015 case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
1016 case MethodRecognizer::kTypedData_Int32ArrayView_factory:
1017 case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
1018 case MethodRecognizer::kTypedData_Int64ArrayView_factory:
1019 case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
1020 case MethodRecognizer::kTypedData_Float32ArrayView_factory:
1021 case MethodRecognizer::kTypedData_Float64ArrayView_factory:
1022 case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
1023 case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
1024 case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
1025 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1026 case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
1027 case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
1028 case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
1029 case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
1030 case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
1031 case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
1032 case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
1033 case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
1034 case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
1035 case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
1036 case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
1037 case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
1038 case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
1039 case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
1040 case MethodRecognizer::kTypedData_Int8Array_factory:
1041 case MethodRecognizer::kTypedData_Uint8Array_factory:
1042 case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
1043 case MethodRecognizer::kTypedData_Int16Array_factory:
1044 case MethodRecognizer::kTypedData_Uint16Array_factory:
1045 case MethodRecognizer::kTypedData_Int32Array_factory:
1046 case MethodRecognizer::kTypedData_Uint32Array_factory:
1047 case MethodRecognizer::kTypedData_Int64Array_factory:
1048 case MethodRecognizer::kTypedData_Uint64Array_factory:
1049 case MethodRecognizer::kTypedData_Float32Array_factory:
1050 case MethodRecognizer::kTypedData_Float64Array_factory:
1051 case MethodRecognizer::kTypedData_Float32x4Array_factory:
1052 case MethodRecognizer::kTypedData_Int32x4Array_factory:
1053 case MethodRecognizer::kTypedData_Float64x2Array_factory:
1054 case MethodRecognizer::kMemCopy:
1055 case MethodRecognizer::kFfiLoadInt8:
1056 case MethodRecognizer::kFfiLoadInt16:
1057 case MethodRecognizer::kFfiLoadInt32:
1058 case MethodRecognizer::kFfiLoadInt64:
1059 case MethodRecognizer::kFfiLoadUint8:
1060 case MethodRecognizer::kFfiLoadUint16:
1061 case MethodRecognizer::kFfiLoadUint32:
1062 case MethodRecognizer::kFfiLoadUint64:
1063 case MethodRecognizer::kFfiLoadFloat:
1064 case MethodRecognizer::kFfiLoadFloatUnaligned:
1065 case MethodRecognizer::kFfiLoadDouble:
1066 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1067 case MethodRecognizer::kFfiLoadPointer:
1068 case MethodRecognizer::kFfiNativeCallbackFunction:
1069 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1070 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction:
1071 case MethodRecognizer::kFfiStoreInt8:
1072 case MethodRecognizer::kFfiStoreInt16:
1073 case MethodRecognizer::kFfiStoreInt32:
1074 case MethodRecognizer::kFfiStoreInt64:
1075 case MethodRecognizer::kFfiStoreUint8:
1076 case MethodRecognizer::kFfiStoreUint16:
1077 case MethodRecognizer::kFfiStoreUint32:
1078 case MethodRecognizer::kFfiStoreUint64:
1079 case MethodRecognizer::kFfiStoreFloat:
1080 case MethodRecognizer::kFfiStoreFloatUnaligned:
1081 case MethodRecognizer::kFfiStoreDouble:
1082 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1083 case MethodRecognizer::kFfiStorePointer:
1084 case MethodRecognizer::kFfiFromAddress:
1085 case MethodRecognizer::kFfiGetAddress:
1086 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1087 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1088 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1089 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1090 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1091 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1092 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1093 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1094 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1095 case MethodRecognizer::kFfiAsExternalTypedDataDouble:
1096 case MethodRecognizer::kGetNativeField:
1097 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1098 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1099 case MethodRecognizer::kFinalizerBase_setIsolate:
1100 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1101 case MethodRecognizer::kFinalizerEntry_allocate:
1102 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1103 case MethodRecognizer::kCheckNotDeeplyImmutable:
1104 case MethodRecognizer::kObjectEquals:
1105 case MethodRecognizer::kStringBaseCodeUnitAt:
1106 case MethodRecognizer::kStringBaseLength:
1107 case MethodRecognizer::kStringBaseIsEmpty:
1108 case MethodRecognizer::kClassIDgetID:
1109 case MethodRecognizer::kGrowableArrayAllocateWithData:
1110 case MethodRecognizer::kGrowableArrayCapacity:
1111 case MethodRecognizer::kObjectArrayAllocate:
1112 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1113 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1114 case MethodRecognizer::kFfiAbi:
1115 case MethodRecognizer::kUtf8DecoderScan:
1116 case MethodRecognizer::kHas63BitSmis:
1117 case MethodRecognizer::kExtensionStreamHasListener:
1118 case MethodRecognizer::kSmi_hashCode:
1119 case MethodRecognizer::kMint_hashCode:
1120 case MethodRecognizer::kDouble_hashCode:
1121#define CASE(method, slot) case MethodRecognizer::k##method:
1127 case MethodRecognizer::kDoubleToInteger:
1128 case MethodRecognizer::kDoubleMod:
1129 case MethodRecognizer::kDoubleRoundToDouble:
1130 case MethodRecognizer::kDoubleTruncateToDouble:
1131 case MethodRecognizer::kDoubleFloorToDouble:
1132 case MethodRecognizer::kDoubleCeilToDouble:
1133 case MethodRecognizer::kMathDoublePow:
1134 case MethodRecognizer::kMathSin:
1135 case MethodRecognizer::kMathCos:
1136 case MethodRecognizer::kMathTan:
1137 case MethodRecognizer::kMathAsin:
1138 case MethodRecognizer::kMathAcos:
1139 case MethodRecognizer::kMathAtan:
1140 case MethodRecognizer::kMathAtan2:
1141 case MethodRecognizer::kMathExp:
1142 case MethodRecognizer::kMathLog:
1143 case MethodRecognizer::kMathSqrt:
1145 case MethodRecognizer::kDoubleCeilToInt:
1146 case MethodRecognizer::kDoubleFloorToInt:
1148#if defined(TARGET_ARCH_X64)
1150#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) || \
1151 defined(TARGET_ARCH_RISCV64)
1164 switch (
function.recognized_kind()) {
1165 case MethodRecognizer::kStringBaseCodeUnitAt:
1172FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
1184 BuildPrologue(normal_entry, &prologue_info);
1187 body += CheckStackOverflowInPrologue(
function.token_pos());
1189 if (
function.IsDynamicInvocationForwarder()) {
1190 body += BuildDefaultTypeHandling(
function);
1191 BuildTypeArgumentTypeChecks(
1193 BuildArgumentTypeChecks(&body, &body,
nullptr);
1198#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
1199 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
1201 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
1203 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
1206#undef TYPED_DATA_GET_INDEXED_CASES
1207 case MethodRecognizer::kObjectArrayGetIndexed:
1208 case MethodRecognizer::kGrowableArrayGetIndexed: {
1220 const auto& native_function =
1222 body += LoadLocal(safe_index);
1228 body += StaticCall(TokenPosition::kNoSource, native_function, 2,
1231 if (kind == MethodRecognizer::kGrowableArrayGetIndexed) {
1233 array_cid = kArrayCid;
1238 body += LoadLocal(safe_index);
1242 compiler::target::Instance::ElementSizeFor(array_cid),
1245 if (elem_rep == kUnboxedFloat) {
1252 case MethodRecognizer::kRecord_fieldAt:
1257 kRecordCid, compiler::target::kCompressedWordSize);
1259 case MethodRecognizer::kRecord_fieldNames:
1260 body += LoadObjectStore();
1264 body +=
IntConstant(compiler::target::RecordShape::kFieldNamesIndexShift);
1266 body +=
IntConstant(compiler::target::RecordShape::kFieldNamesIndexMask);
1269 kArrayCid, compiler::target::kCompressedWordSize);
1271 case MethodRecognizer::kRecord_numFields:
1274 body +=
IntConstant(compiler::target::RecordShape::kNumFieldsMask);
1277 case MethodRecognizer::kSuspendState_clone: {
1280 body += Call1ArgStub(TokenPosition::kNoSource,
1284 case MethodRecognizer::kSuspendState_resume: {
1285 const Code& resume_stub =
1291 case MethodRecognizer::kTypedList_GetInt8:
1292 body += BuildTypedListGet(
function, kTypedDataInt8ArrayCid);
1294 case MethodRecognizer::kTypedList_SetInt8:
1295 body += BuildTypedListSet(
function, kTypedDataInt8ArrayCid);
1297 case MethodRecognizer::kTypedList_GetUint8:
1298 body += BuildTypedListGet(
function, kTypedDataUint8ArrayCid);
1300 case MethodRecognizer::kTypedList_SetUint8:
1301 body += BuildTypedListSet(
function, kTypedDataUint8ArrayCid);
1303 case MethodRecognizer::kTypedList_GetInt16:
1304 body += BuildTypedListGet(
function, kTypedDataInt16ArrayCid);
1306 case MethodRecognizer::kTypedList_SetInt16:
1307 body += BuildTypedListSet(
function, kTypedDataInt16ArrayCid);
1309 case MethodRecognizer::kTypedList_GetUint16:
1310 body += BuildTypedListGet(
function, kTypedDataUint16ArrayCid);
1312 case MethodRecognizer::kTypedList_SetUint16:
1313 body += BuildTypedListSet(
function, kTypedDataUint16ArrayCid);
1315 case MethodRecognizer::kTypedList_GetInt32:
1316 body += BuildTypedListGet(
function, kTypedDataInt32ArrayCid);
1318 case MethodRecognizer::kTypedList_SetInt32:
1319 body += BuildTypedListSet(
function, kTypedDataInt32ArrayCid);
1321 case MethodRecognizer::kTypedList_GetUint32:
1322 body += BuildTypedListGet(
function, kTypedDataUint32ArrayCid);
1324 case MethodRecognizer::kTypedList_SetUint32:
1325 body += BuildTypedListSet(
function, kTypedDataUint32ArrayCid);
1327 case MethodRecognizer::kTypedList_GetInt64:
1328 body += BuildTypedListGet(
function, kTypedDataInt64ArrayCid);
1330 case MethodRecognizer::kTypedList_SetInt64:
1331 body += BuildTypedListSet(
function, kTypedDataInt64ArrayCid);
1333 case MethodRecognizer::kTypedList_GetUint64:
1334 body += BuildTypedListGet(
function, kTypedDataUint64ArrayCid);
1336 case MethodRecognizer::kTypedList_SetUint64:
1337 body += BuildTypedListSet(
function, kTypedDataUint64ArrayCid);
1339 case MethodRecognizer::kTypedList_GetFloat32:
1340 body += BuildTypedListGet(
function, kTypedDataFloat32ArrayCid);
1342 case MethodRecognizer::kTypedList_SetFloat32:
1343 body += BuildTypedListSet(
function, kTypedDataFloat32ArrayCid);
1345 case MethodRecognizer::kTypedList_GetFloat64:
1346 body += BuildTypedListGet(
function, kTypedDataFloat64ArrayCid);
1348 case MethodRecognizer::kTypedList_SetFloat64:
1349 body += BuildTypedListSet(
function, kTypedDataFloat64ArrayCid);
1351 case MethodRecognizer::kTypedList_GetInt32x4:
1352 body += BuildTypedListGet(
function, kTypedDataInt32x4ArrayCid);
1354 case MethodRecognizer::kTypedList_SetInt32x4:
1355 body += BuildTypedListSet(
function, kTypedDataInt32x4ArrayCid);
1357 case MethodRecognizer::kTypedList_GetFloat32x4:
1358 body += BuildTypedListGet(
function, kTypedDataFloat32x4ArrayCid);
1360 case MethodRecognizer::kTypedList_SetFloat32x4:
1361 body += BuildTypedListSet(
function, kTypedDataFloat32x4ArrayCid);
1363 case MethodRecognizer::kTypedList_GetFloat64x2:
1364 body += BuildTypedListGet(
function, kTypedDataFloat64x2ArrayCid);
1366 case MethodRecognizer::kTypedList_SetFloat64x2:
1367 body += BuildTypedListSet(
function, kTypedDataFloat64x2ArrayCid);
1369 case MethodRecognizer::kTypedData_memMove1:
1370 body += BuildTypedDataMemMove(
function, kTypedDataInt8ArrayCid);
1372 case MethodRecognizer::kTypedData_memMove2:
1373 body += BuildTypedDataMemMove(
function, kTypedDataInt16ArrayCid);
1375 case MethodRecognizer::kTypedData_memMove4:
1376 body += BuildTypedDataMemMove(
function, kTypedDataInt32ArrayCid);
1378 case MethodRecognizer::kTypedData_memMove8:
1379 body += BuildTypedDataMemMove(
function, kTypedDataInt64ArrayCid);
1381 case MethodRecognizer::kTypedData_memMove16:
1382 body += BuildTypedDataMemMove(
function, kTypedDataInt32x4ArrayCid);
1385 case MethodRecognizer::kTypedData_##name##_factory: \
1386 body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
1388 case MethodRecognizer::kTypedData_##name##View_factory: \
1389 body += BuildTypedDataViewFactoryConstructor(function, \
1390 kTypedData##name##ViewCid); \
1392 case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
1393 body += BuildTypedDataViewFactoryConstructor( \
1394 function, kUnmodifiableTypedData##name##ViewCid); \
1398 case MethodRecognizer::kTypedData_ByteDataView_factory:
1401 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1402 body += BuildTypedDataViewFactoryConstructor(
1405 case MethodRecognizer::kObjectEquals:
1411 case MethodRecognizer::kStringBaseCodeUnitAt: {
1421 TargetEntryInstr* one_byte_string;
1422 TargetEntryInstr* two_byte_string;
1428 body.
current = one_byte_string;
1430 body += LoadLocal(safe_index);
1434 compiler::target::Instance::ElementSizeFor(kOneByteStringCid),
1440 body.
current = two_byte_string;
1442 body += LoadLocal(safe_index);
1446 compiler::target::Instance::ElementSizeFor(kTwoByteStringCid),
1454 body += LoadLocal(
result);
1456 case MethodRecognizer::kStringBaseLength:
1457 case MethodRecognizer::kStringBaseIsEmpty:
1461 if (kind == MethodRecognizer::kStringBaseIsEmpty) {
1466 case MethodRecognizer::kClassIDgetID:
1471 case MethodRecognizer::kGrowableArrayAllocateWithData: {
1479 body += LoadLocal(
object);
1484 body += LoadLocal(
object);
1491 case MethodRecognizer::kGrowableArrayCapacity:
1497 case MethodRecognizer::kObjectArrayAllocate:
1503 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1510 body +=
MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
1515 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1526 case MethodRecognizer::kUtf8DecoderScan:
1540 case MethodRecognizer::kMemCopy: {
1543 LocalVariable* arg_target_offset_in_bytes =
1546 LocalVariable* arg_source_offset_in_bytes =
1548 LocalVariable* arg_length_in_bytes =
1550 body += LoadLocal(arg_source);
1551 body += LoadLocal(arg_target);
1552 body += LoadLocal(arg_source_offset_in_bytes);
1554 body += LoadLocal(arg_target_offset_in_bytes);
1556 body += LoadLocal(arg_length_in_bytes);
1558 body +=
MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1563 case MethodRecognizer::kFfiAbi:
1567 case MethodRecognizer::kFfiNativeCallbackFunction:
1568 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1569 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction: {
1572 "This function should be handled on call site."));
1577 case MethodRecognizer::kFfiLoadInt8:
1578 case MethodRecognizer::kFfiLoadInt16:
1579 case MethodRecognizer::kFfiLoadInt32:
1580 case MethodRecognizer::kFfiLoadInt64:
1581 case MethodRecognizer::kFfiLoadUint8:
1582 case MethodRecognizer::kFfiLoadUint16:
1583 case MethodRecognizer::kFfiLoadUint32:
1584 case MethodRecognizer::kFfiLoadUint64:
1585 case MethodRecognizer::kFfiLoadFloat:
1586 case MethodRecognizer::kFfiLoadFloatUnaligned:
1587 case MethodRecognizer::kFfiLoadDouble:
1588 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1589 case MethodRecognizer::kFfiLoadPointer: {
1599 LocalVariable* arg_typed_data_base =
1603 body += LoadLocal(arg_typed_data_base);
1605 body += LoadLocal(arg_offset);
1610 if (kind == MethodRecognizer::kFfiLoadPointer) {
1611 const auto& pointer_class =
1614 Z,
IG->object_store()->type_argument_never());
1620 body +=
AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1622 body += LoadLocal(pointer);
1623 body += LoadLocal(address);
1637 case MethodRecognizer::kFfiStoreInt8:
1638 case MethodRecognizer::kFfiStoreInt16:
1639 case MethodRecognizer::kFfiStoreInt32:
1640 case MethodRecognizer::kFfiStoreInt64:
1641 case MethodRecognizer::kFfiStoreUint8:
1642 case MethodRecognizer::kFfiStoreUint16:
1643 case MethodRecognizer::kFfiStoreUint32:
1644 case MethodRecognizer::kFfiStoreUint64:
1645 case MethodRecognizer::kFfiStoreFloat:
1646 case MethodRecognizer::kFfiStoreFloatUnaligned:
1647 case MethodRecognizer::kFfiStoreDouble:
1648 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1649 case MethodRecognizer::kFfiStorePointer: {
1658 LocalVariable* arg_typed_data_base =
1665 body += LoadLocal(arg_typed_data_base);
1667 body += LoadLocal(arg_offset);
1670 body += LoadLocal(arg_value);
1672 if (kind == MethodRecognizer::kFfiStorePointer) {
1682 body += UnboxTruncate(
1689 case MethodRecognizer::kFfiFromAddress: {
1690 const auto& pointer_class =
1693 Z,
IG->object_store()->type_argument_never());
1698 body +=
AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1711 case MethodRecognizer::kFfiGetAddress: {
1721 case MethodRecognizer::kHas63BitSmis: {
1722#if defined(HAS_SMI_63_BITS)
1728 case MethodRecognizer::kExtensionStreamHasListener: {
1732 body += LoadServiceExtensionStream();
1736 body += IntToBool();
1739 case MethodRecognizer::kSmi_hashCode: {
1747 body += BuildIntegerHashCode(
true);
1749 case MethodRecognizer::kMint_hashCode: {
1752 body += BuildIntegerHashCode(
false);
1754 case MethodRecognizer::kDouble_hashCode: {
1757 body += UnboxTruncate(kUnboxedDouble);
1758 body += BuildDoubleHashCode();
1759 body +=
Box(kUnboxedInt64);
1761 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1762 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1763 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1764 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1765 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1766 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1767 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1768 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1769 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1770 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1778 const auto& typed_data_class =
1787 body +=
AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1791 body += LoadLocal(typed_data_object);
1792 body += LoadLocal(arg_length);
1798 body += LoadLocal(typed_data_object);
1799 body += LoadLocal(arg_pointer);
1806 case MethodRecognizer::kGetNativeField: {
1820 case MethodRecognizer::kDoubleToInteger:
1821 case MethodRecognizer::kDoubleCeilToInt:
1822 case MethodRecognizer::kDoubleFloorToInt: {
1826 case MethodRecognizer::kDoubleMod:
1827 case MethodRecognizer::kDoubleRoundToDouble:
1828 case MethodRecognizer::kDoubleTruncateToDouble:
1829 case MethodRecognizer::kDoubleFloorToDouble:
1830 case MethodRecognizer::kDoubleCeilToDouble:
1831 case MethodRecognizer::kMathDoublePow:
1832 case MethodRecognizer::kMathSin:
1833 case MethodRecognizer::kMathCos:
1834 case MethodRecognizer::kMathTan:
1835 case MethodRecognizer::kMathAsin:
1836 case MethodRecognizer::kMathAcos:
1837 case MethodRecognizer::kMathAtan:
1838 case MethodRecognizer::kMathAtan2:
1839 case MethodRecognizer::kMathExp:
1840 case MethodRecognizer::kMathLog: {
1841 for (intptr_t i = 0, n =
function.NumParameters(); i < n; ++i) {
1846 ((kind == MethodRecognizer::kDoubleTruncateToDouble) ||
1847 (kind == MethodRecognizer::kDoubleFloorToDouble) ||
1848 (kind == MethodRecognizer::kDoubleCeilToDouble))) {
1850 case MethodRecognizer::kDoubleTruncateToDouble:
1853 case MethodRecognizer::kDoubleFloorToDouble:
1856 case MethodRecognizer::kDoubleCeilToDouble:
1866 case MethodRecognizer::kMathSqrt: {
1870 case MethodRecognizer::kFinalizerBase_setIsolate:
1873 body += LoadIsolate();
1878 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1880 body += LoadIsolate();
1883 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1885 body += LoadIsolate();
1890 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1892 ASSERT(this->optimizing_);
1905 case MethodRecognizer::kFinalizerEntry_allocate: {
1910 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1911 const auto& finalizer_entry_class =
1915 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1918 body += LoadLocal(entry);
1921 body += LoadLocal(entry);
1924 body += LoadLocal(entry);
1927 body += LoadLocal(entry);
1930 body += LoadLocal(entry);
1935 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1939 body +=
Box(kUnboxedInt64);
1941 case MethodRecognizer::kCheckNotDeeplyImmutable:
1948#define IL_BODY(method, slot) \
1949 case MethodRecognizer::k##method: \
1950 ASSERT_EQUAL(function.NumParameters(), 1); \
1951 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1952 body += LoadNativeField(Slot::slot()); \
1956#define IL_BODY(method, slot) \
1957 case MethodRecognizer::k##method: \
1958 ASSERT_EQUAL(function.NumParameters(), 2); \
1959 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1960 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1961 body += StoreNativeField(Slot::slot()); \
1962 body += NullConstant(); \
1966#define IL_BODY(method, slot) \
1967 case MethodRecognizer::k##method: \
1968 ASSERT_EQUAL(function.NumParameters(), 2); \
1969 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1970 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1971 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1973 body += NullConstant(); \
1985 Return(TokenPosition::kNoSource,
true);
1993Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
1996 auto token_pos =
function.token_pos();
1999 ASSERT(class_table->HasValidClassAt(
cid));
2020 body += LoadLocal(view_object);
2021 body += LoadLocal(typed_data);
2025 body += LoadLocal(view_object);
2026 body += LoadLocal(offset_in_bytes);
2031 body += LoadLocal(view_object);
2032 body += LoadLocal(
length);
2041 body += LoadLocal(offset_in_bytes);
2043 LocalVariable* unboxed_offset_in_bytes =
2050 body += LoadLocal(view_object);
2051 body += LoadLocal(typed_data);
2055 body += LoadLocal(unboxed_offset_in_bytes);
2065Fragment FlowGraphBuilder::BuildTypedListGet(
const Function&
function,
2067 const intptr_t kNumParameters = 2;
2073 LocalVariable* arg_offset_in_bytes =
2078 body += LoadLocal(arg_receiver);
2079 body += LoadLocal(arg_offset_in_bytes);
2085 body += LoadLocal(arg_receiver);
2086 body += LoadLocal(arg_offset_in_bytes);
2087 body += StaticCall(TokenPosition::kNoSource, native_function,
2088 kNumParameters, ICData::kNoRebind);
2099 case kUnboxedDouble:
2100 return state.TypedListSetFloat64();
2101 case kUnboxedInt32x4:
2102 return state.TypedListSetInt32x4();
2103 case kUnboxedFloat32x4:
2104 return state.TypedListSetFloat32x4();
2105 case kUnboxedFloat64x2:
2106 return state.TypedListSetFloat64x2();
2109 return Object::null_function();
2115 const intptr_t kNumParameters = 3;
2127 body += LoadLocal(arg_receiver);
2128 body += LoadLocal(arg_offset_in_bytes);
2129 body += LoadLocal(arg_value);
2138 body += LoadLocal(arg_receiver);
2139 body += LoadLocal(arg_offset_in_bytes);
2140 body += LoadLocal(arg_value);
2141 body += StaticCall(TokenPosition::kNoSource, native_function,
2142 kNumParameters, ICData::kNoRebind);
2147Fragment FlowGraphBuilder::BuildTypedDataMemMove(
const Function&
function,
2159#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2163 const intptr_t kCopyLengthForCCall = 1024 * 1024;
2171 const intptr_t kCopyLengthForCCall = 1024;
2175 TargetEntryInstr *is_small_enough, *is_too_large;
2176 body += LoadLocal(arg_count);
2181 Fragment use_instruction(is_small_enough);
2182 use_instruction += LoadLocal(arg_from);
2183 use_instruction += LoadLocal(arg_to);
2184 use_instruction += LoadLocal(arg_from_start);
2185 use_instruction += LoadLocal(arg_to_start);
2186 use_instruction += LoadLocal(arg_count);
2191 Fragment call_memmove(is_too_large);
2193 auto*
const arg_reps =
2194 new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
2198 call_memmove += LoadLocal(arg_to_start);
2200 LocalVariable* to_start_unboxed =
MakeTemporary(
"to_start_unboxed");
2201 call_memmove += LoadLocal(arg_from_start);
2203 LocalVariable* from_start_unboxed =
MakeTemporary(
"from_start_unboxed");
2206 call_memmove += LoadLocal(arg_count);
2207 call_memmove += UnboxTruncate(size_rep);
2211 LocalVariable* length_in_bytes =
MakeTemporary(
"length_in_bytes");
2213 call_memmove += LoadLocal(arg_to);
2216 call_memmove += LoadLocal(to_start_unboxed);
2219 arg_reps->Add(kUntagged);
2221 call_memmove += LoadLocal(arg_from);
2224 call_memmove += LoadLocal(from_start_unboxed);
2227 arg_reps->Add(kUntagged);
2229 call_memmove += LoadLocal(length_in_bytes);
2230 arg_reps->Add(size_rep);
2233 CallLeafRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
2235 call_memmove +=
Drop();
2241 body.current =
done;
2247Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
2250 const auto token_pos =
function.token_pos();
2257 Fragment instructions;
2258 instructions += LoadLocal(
length);
2262 return instructions;
2265Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
2266 TokenPosition position,
2267 const Function&
target) {
2274 fragment += LoadLocal(parsed_function_->
receiver_var());
2276 const bool has_instantiator_type_args =
2278 if (has_instantiator_type_args) {
2279 fragment += LoadInstantiatorTypeArguments();
2282 target.IsGeneric(),
true);
2287Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
2288 const AbstractType& dst_type,
2289 const String& name_symbol) {
2293bool FlowGraphBuilder::NeedsDebugStepCheck(
const Function&
function,
2294 TokenPosition position) {
2295 return position.IsDebugPause() && !
function.is_native() &&
2299bool FlowGraphBuilder::NeedsDebugStepCheck(
Value* value,
2300 TokenPosition position) {
2301 if (!position.IsDebugPause()) {
2304 Definition* definition =
value->definition();
2305 if (definition->IsConstant() || definition->IsLoadStaticField() ||
2306 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
2307 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
2310 if (
auto const alloc = definition->AsAllocateClosure()) {
2311 return !alloc->known_function().IsNull();
2316Fragment FlowGraphBuilder::EvaluateAssertion() {
2317 const Class& klass =
2320 const auto&
error = klass.EnsureIsFinalized(
H.thread());
2323 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::EvaluateAssertion()));
2325 return StaticCall(TokenPosition::kNoSource,
target, 1,
2329Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
2330 Fragment instructions;
2332 instructions += LoadLocal(top_of_stack);
2334 instructions +=
Drop();
2335 return instructions;
2338Fragment FlowGraphBuilder::CheckAssignable(
const AbstractType& dst_type,
2339 const String& dst_name,
2341 TokenPosition token_pos) {
2342 Fragment instructions;
2343 if (!dst_type.IsTopTypeForSubtyping()) {
2345 instructions += LoadLocal(top_of_stack);
2347 AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
2348 instructions +=
Drop();
2350 return instructions;
2353Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
2354 TokenPosition position,
2355 const AbstractType& dst_type,
2356 const String& dst_name,
2358 Fragment instructions;
2363 instructions += LoadInstantiatorTypeArguments();
2369 instructions += LoadFunctionTypeArguments();
2376 return instructions;
2379Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
2380 const AbstractType& sub_type_value,
2381 const AbstractType& super_type_value,
2382 const String& dst_name_value) {
2383 Fragment instructions;
2384 instructions += LoadInstantiatorTypeArguments();
2385 instructions += LoadFunctionTypeArguments();
2389 instructions += AssertSubtype(position);
2390 return instructions;
2393Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
2394 Fragment instructions;
2400 Value* instantiator_type_args =
Pop();
2402 AssertSubtypeInstr* instr =
new (
Z) AssertSubtypeInstr(
2403 InstructionSource(position), instantiator_type_args, function_type_args,
2405 instructions += Fragment(instr);
2407 return instructions;
2411 Fragment* implicit_checks) {
2412 const Function& dart_function = parsed_function_->
function();
2414 const Function* forwarding_target =
nullptr;
2417 ASSERT(!forwarding_target->IsNull());
2421 if (dart_function.IsFactory()) {
2422 type_parameters =
Class::Handle(
Z, dart_function.Owner()).type_parameters();
2424 type_parameters = dart_function.type_parameters();
2426 const intptr_t num_type_params = type_parameters.Length();
2427 if (num_type_params == 0)
return;
2428 if (forwarding_target !=
nullptr) {
2429 type_parameters = forwarding_target->type_parameters();
2430 ASSERT(type_parameters.Length() == num_type_params);
2432 if (type_parameters.AllDynamicBounds()) {
2439 for (intptr_t i = 0; i < num_type_params; ++i) {
2440 bound = type_parameters.BoundAt(i);
2441 if (bound.IsTopTypeForSubtyping()) {
2449 if (!type_parameters.IsGenericCovariantImplAt(i)) {
2454 if (type_parameters.IsGenericCovariantImplAt(i)) {
2460 name = type_parameters.NameAt(i);
2462 if (forwarding_target !=
nullptr) {
2463 type_param = forwarding_target->TypeParameterAt(i);
2464 }
else if (dart_function.IsFactory()) {
2465 type_param =
Class::Handle(
Z, dart_function.Owner()).TypeParameterAt(i);
2467 type_param = dart_function.TypeParameterAt(i);
2469 ASSERT(type_param.IsFinalized());
2471 AssertSubtype(TokenPosition::kNoSource, type_param, bound,
name);
2477 if (dart_function.IsClosureFunction() && !
check_bounds.is_empty() &&
2478 FLAG_eliminate_type_checks) {
2487void FlowGraphBuilder::BuildArgumentTypeChecks(
2488 Fragment* explicit_checks,
2489 Fragment* implicit_checks,
2490 Fragment* implicit_redefinitions) {
2491 const Function& dart_function = parsed_function_->
function();
2493 const Function* forwarding_target =
nullptr;
2496 ASSERT(!forwarding_target->IsNull());
2499 const intptr_t num_params = dart_function.NumParameters();
2500 for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
2503 const String&
name = param->
name();
2504 if (!param->needs_type_check()) {
2507 if (param->is_captured()) {
2511 const AbstractType* target_type = ¶m->
static_type();
2512 if (forwarding_target !=
nullptr) {
2518 if (target_type->IsTopTypeForSubtyping())
continue;
2520 const bool is_covariant = param->is_explicit_covariant_parameter();
2521 Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
2523 *checks += LoadLocal(param);
2524 *checks += AssertAssignableLoadTypeArguments(
2525 param->token_pos(), *target_type,
name,
2526 AssertAssignableInstr::kParameterCheck);
2530 if (!is_covariant && implicit_redefinitions !=
nullptr && optimizing_) {
2533 AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
2535 *implicit_redefinitions += LoadLocal(param);
2537 *implicit_redefinitions +=
StoreLocal(TokenPosition::kNoSource, param);
2538 *implicit_redefinitions +=
Drop();
2543BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2544 PrologueInfo* prologue_info) {
2547 kernel::PrologueBuilder prologue_builder(
2549 BlockEntryInstr* instruction_cursor =
2550 prologue_builder.BuildPrologue(normal_entry, prologue_info);
2554 return instruction_cursor;
2557ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(
const Function&
function) {
2558 if (!
function.HasOptionalNamedParameters()) {
2562 const intptr_t num_fixed_params =
function.num_fixed_parameters();
2563 const intptr_t num_opt_params =
function.NumOptionalNamedParameters();
2566 for (intptr_t i = 0; i < num_opt_params; ++i) {
2573Fragment FlowGraphBuilder::PushExplicitParameters(
2575 const Function&
target ) {
2576 Fragment instructions;
2577 for (intptr_t i =
function.NumImplicitParameters(),
2581 if (!
target.IsNull() &&
target.is_unboxed_parameter_at(i)) {
2583 if (
target.is_unboxed_integer_parameter_at(i)) {
2587 to = kUnboxedDouble;
2592 push_param += Fragment(unbox);
2594 instructions += push_param;
2596 return instructions;
2599FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
2600 const Function& method) {
2611 Fragment body(normal_entry);
2612 body += CheckStackOverflowInPrologue(method.token_pos());
2613 body += BuildImplicitClosureCreation(TokenPosition::kNoSource,
function);
2614 body += Return(TokenPosition::kNoSource);
2617 PrologueInfo prologue_info(-1, -1);
2623FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
2635 PrologueInfo prologue_info(-1, -1);
2636 BlockEntryInstr* instruction_cursor =
2637 BuildPrologue(normal_entry, &prologue_info);
2639 Fragment body(instruction_cursor);
2640 body += CheckStackOverflowInPrologue(
function.token_pos());
2655 const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
2657 body +=
IntConstant(receiver_index + descriptor.Size());
2660 if (receiver_index > 0) {
2662 ASSERT(type_args !=
nullptr);
2663 body += LoadLocal(array);
2665 body += LoadLocal(type_args);
2668 for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
2669 body += LoadLocal(array);
2675 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
2676 const intptr_t parameter_index = descriptor.PositionAt(i);
2677 name = descriptor.NameAt(i);
2679 body += LoadLocal(array);
2680 body +=
IntConstant(receiver_index + parameter_index);
2688 const Class& mirror_class =
2690 ASSERT(!mirror_class.IsNull());
2691 const auto&
error = mirror_class.EnsureIsFinalized(
H.thread());
2694 Z, mirror_class.LookupStaticFunction(
2696 ASSERT(!allocation_function.IsNull());
2698 4, ICData::kStatic);
2700 const int kTypeArgsLen = 0;
2701 ArgumentsDescriptor two_arguments(
2703 Function& no_such_method =
2706 Symbols::NoSuchMethod(), two_arguments));
2707 if (no_such_method.IsNull()) {
2712 Symbols::NoSuchMethod(), two_arguments);
2715 2, ICData::kNSMDispatch);
2716 body += Return(TokenPosition::kNoSource);
2723FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2734 Fragment body(normal_entry);
2735 body += CheckStackOverflowInPrologue(
function.token_pos());
2743 const auto&
error = cls.EnsureIsFinalized(thread_);
2746 Z, cls.LookupFunctionAllowPrivate(Symbols::Get_fieldNames()));
2747 ASSERT(!get_field_names_function.IsNull());
2749 body += StaticCall(TokenPosition::kNoSource, get_field_names_function, 1,
2753 body += LoadLocal(field_names);
2760 body +=
IntConstant(compiler::target::RecordShape::kNumFieldsMask);
2762 body += LoadLocal(num_named);
2764 LocalVariable* num_positional =
MakeTemporary(
"num_positional");
2766 const intptr_t field_index =
2768 if (field_index >= 0) {
2771 body += LoadLocal(num_positional);
2773 TargetEntryInstr* valid_index;
2774 TargetEntryInstr* invalid_index;
2777 body.current = valid_index;
2780 thread_, compiler::target::Record::field_offset(field_index)));
2787 body.current = invalid_index;
2792 body += LoadLocal(num_named);
2794 TargetEntryInstr* has_named_fields;
2795 TargetEntryInstr* no_named_fields;
2796 body +=
BranchIfTrue(&has_named_fields, &no_named_fields);
2798 Fragment(no_named_fields) +
Goto(nsm);
2799 body.current = has_named_fields;
2803 body +=
StoreLocal(TokenPosition::kNoSource, index);
2808 body.current = loop;
2810 body += LoadLocal(field_names);
2811 body += LoadLocal(index);
2813 compiler::target::kCompressedWordSize);
2815 TargetEntryInstr* found;
2816 TargetEntryInstr* continue_search;
2819 body.current = continue_search;
2820 body += LoadLocal(index);
2823 body +=
StoreLocal(TokenPosition::kNoSource, index);
2826 body += LoadLocal(index);
2827 body += LoadLocal(num_named);
2829 TargetEntryInstr* has_more_fields;
2830 TargetEntryInstr* no_more_fields;
2831 body +=
BranchIfTrue(&has_more_fields, &no_more_fields);
2833 Fragment(has_more_fields) +
Goto(loop);
2834 Fragment(no_more_fields) +
Goto(nsm);
2836 body.current = found;
2840 body += LoadLocal(num_positional);
2841 body += LoadLocal(index);
2845 compiler::target::kCompressedWordSize);
2852 body.current =
done;
2856 body += Return(TokenPosition::kNoSource);
2858 Fragment throw_nsm(nsm);
2859 throw_nsm += LoadLocal(parsed_function_->
receiver_var());
2860 throw_nsm += ThrowNoSuchMethodError(TokenPosition::kNoSource,
function,
2866 PrologueInfo prologue_info(-1, -1);
2876 const Array& arguments_descriptor_array,
2906Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2913 check += LoadLocal(
info.type_parameters);
2918 generic.Prepend(is_generic);
2919 generic +=
Goto(after_branch);
2921 not_generic.
Prepend(is_not_generic);
2922 not_generic +=
Goto(after_branch);
2927Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2928 const ClosureCallInfo&
info,
2933 check_required += LoadLocal(
info.vars->current_param_index);
2935 IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
2937 check_required += LoadLocal(
info.num_opt_params);
2947 check_required += LoadLocal(flags_index);
2948 check_required += LoadLocal(
info.named_parameter_names);
2953 check_required +=
BranchIfTrue(&valid_index, &invalid_index);
2962 check_required.
current = valid_index;
2963 check_required += LoadLocal(
info.named_parameter_names);
2964 check_required += LoadLocal(flags_index);
2966 kArrayCid, compiler::target::kCompressedWordSize);
2967 check_required += LoadLocal(
info.vars->current_param_index);
2969 IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
2973 static_assert(compiler::target::kNumParameterFlags == 1,
2974 "IL builder assumes only one flag bit per parameter");
2977 IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
2986 set.Prepend(is_set);
2987 set +=
Goto(after_check);
2989 not_set.
Prepend(join_not_set);
2990 not_set +=
Goto(after_check);
2993 check_required.
current = after_check;
2995 return check_required;
2998Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2999 const ClosureCallInfo&
info) {
3000 if (
info.descriptor.TypeArgsLen() > 0) {
3004 Fragment store_provided;
3007 store_provided +=
Drop();
3008 return store_provided;
3013 Fragment store_default;
3014 store_default += LoadLocal(
info.closure);
3017 LocalVariable* closure_data =
MakeTemporary(
"closure_data");
3019 store_default += LoadLocal(closure_data);
3020 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
3022 LocalVariable* default_tav_kind =
MakeTemporary(
"default_tav_kind");
3027 store_default += LoadLocal(default_tav_kind);
3028 TargetEntryInstr* is_instantiated;
3029 TargetEntryInstr* is_not_instantiated;
3032 store_default +=
BranchIfEqual(&is_instantiated, &is_not_instantiated);
3033 store_default.current = is_not_instantiated;
3034 store_default += LoadLocal(default_tav_kind);
3035 TargetEntryInstr* needs_instantiation;
3036 TargetEntryInstr* can_share;
3039 store_default +=
BranchIfEqual(&needs_instantiation, &can_share);
3040 store_default.current = can_share;
3041 store_default += LoadLocal(default_tav_kind);
3042 TargetEntryInstr* can_share_instantiator;
3043 TargetEntryInstr* can_share_function;
3044 store_default +=
IntConstant(
static_cast<intptr_t
>(
3046 store_default +=
BranchIfEqual(&can_share_instantiator, &can_share_function);
3048 Fragment instantiated(is_instantiated);
3049 instantiated += LoadLocal(
info.type_parameters);
3052 instantiated +=
Drop();
3055 Fragment do_instantiation(needs_instantiation);
3057 do_instantiation += LoadLocal(
info.instantiator_type_args);
3060 do_instantiation += LoadLocal(
info.parent_function_type_args);
3062 do_instantiation += LoadLocal(
info.type_parameters);
3066 do_instantiation +=
Drop();
3069 Fragment share_instantiator(can_share_instantiator);
3070 share_instantiator += LoadLocal(
info.instantiator_type_args);
3072 share_instantiator +=
Drop();
3075 Fragment share_function(can_share_function);
3078 share_function += LoadLocal(
info.parent_function_type_args);
3080 share_function +=
Drop();
3083 store_default.current =
done;
3087 Fragment store_delayed;
3088 store_delayed += LoadLocal(
info.closure);
3089 store_delayed +=
LoadNativeField(Slot::Closure_delayed_type_arguments());
3091 store_delayed +=
Drop();
3097Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
3098 const ClosureCallInfo&
info) {
3101 if (
info.descriptor.NamedCount() == 0) {
3104 static_assert(compiler::target::kNumParameterFlags == 1,
3105 "IL builder assumes only one flag bit per parameter");
3111 has_any += LoadLocal(
info.num_opt_params);
3112 has_any += LoadLocal(
info.named_parameter_names);
3114 TargetEntryInstr* no_required;
3115 TargetEntryInstr* has_required;
3118 Fragment(has_required) +
Goto(
info.throw_no_such_method);
3120 return Fragment(has_any.entry, no_required);
3125 Fragment check_names;
3126 check_names += LoadLocal(
info.vars->current_param_index);
3128 check_names += LoadLocal(
info.vars->current_num_processed);
3129 LocalVariable* old_processed =
MakeTemporary(
"old_processed");
3138 check_names +=
Drop();
3141 check_names +=
Drop();
3142 check_names +=
Goto(loop);
3144 Fragment loop_check(loop);
3145 loop_check += LoadLocal(
info.vars->current_param_index);
3146 loop_check += LoadLocal(
info.num_opt_params);
3148 TargetEntryInstr* no_more;
3149 TargetEntryInstr* more;
3154 Fragment loop_body(more);
3156 loop_body += LoadLocal(
info.named_parameter_names);
3157 loop_body += LoadLocal(
info.vars->current_param_index);
3159 kArrayCid, compiler::target::kCompressedWordSize);
3167 for (intptr_t i = 0; i <
info.descriptor.NamedCount(); i++) {
3170 loop_body += LoadLocal(param_name);
3171 TargetEntryInstr*
match;
3172 TargetEntryInstr* mismatch;
3174 loop_body.current = mismatch;
3179 Fragment matched(
match);
3180 matched += LoadLocal(
info.vars->current_param_index);
3181 matched += LoadLocal(
info.num_fixed_params);
3183 matched +=
StoreLocal(
info.vars->named_argument_parameter_indices.At(i));
3185 matched += LoadLocal(
info.vars->current_num_processed);
3190 matched +=
Goto(loop_incr);
3195 loop_body += TestClosureFunctionNamedParameterRequired(
3200 loop_body +=
Goto(loop_incr);
3202 Fragment incr_index(loop_incr);
3204 incr_index += LoadLocal(
info.vars->current_param_index);
3208 incr_index +=
Drop();
3209 incr_index +=
Goto(loop);
3211 Fragment check_processed(
done);
3212 check_processed += LoadLocal(
info.vars->current_num_processed);
3214 TargetEntryInstr* all_processed;
3215 TargetEntryInstr* bad_name;
3216 check_processed +=
BranchIfEqual(&all_processed, &bad_name);
3219 Fragment(bad_name) +
Goto(
info.throw_no_such_method);
3222 check_names.current = all_processed;
3223 check_names += LoadLocal(old_processed);
3225 check_names +=
Drop();
3227 check_names += LoadLocal(old_index);
3229 check_names +=
Drop();
3234Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
3235 const ClosureCallInfo&
info) {
3236 Fragment check_entry;
3238 if (
info.descriptor.TypeArgsLen() > 0) {
3239 Fragment check_type_args_length;
3240 check_type_args_length += LoadLocal(
info.type_parameters);
3241 TargetEntryInstr* null;
3242 TargetEntryInstr* not_null;
3243 check_type_args_length +=
BranchIfNull(&null, ¬_null);
3244 check_type_args_length.current = not_null;
3245 check_type_args_length += LoadLocal(
info.signature);
3246 check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
3248 Slot::FunctionType_packed_type_parameter_counts());
3250 TargetEntryInstr*
equal;
3251 TargetEntryInstr* not_equal;
3253 check_type_args_length.current =
equal;
3256 Fragment(null) +
Goto(
info.throw_no_such_method);
3259 Fragment(not_equal) +
Goto(
info.throw_no_such_method);
3265 check_type_args_length);
3268 check_entry += LoadLocal(
info.has_named_params);
3269 TargetEntryInstr* has_named;
3270 TargetEntryInstr* has_positional;
3271 check_entry +=
BranchIfTrue(&has_named, &has_positional);
3273 check_entry.current = join_after_optional;
3275 if (
info.descriptor.NamedCount() > 0) {
3277 Fragment(has_positional) +
Goto(
info.throw_no_such_method);
3279 Fragment check_pos(has_positional);
3280 check_pos += LoadLocal(
info.num_fixed_params);
3283 TargetEntryInstr* enough;
3284 TargetEntryInstr* too_few;
3286 check_pos.current = enough;
3288 Fragment(too_few) +
Goto(
info.throw_no_such_method);
3291 check_pos += LoadLocal(
info.num_max_params);
3293 TargetEntryInstr* valid;
3294 TargetEntryInstr* too_many;
3296 check_pos.current = valid;
3298 Fragment(too_many) +
Goto(
info.throw_no_such_method);
3300 check_pos +=
Goto(join_after_optional);
3303 Fragment check_named(has_named);
3305 TargetEntryInstr* same;
3306 TargetEntryInstr* different;
3307 check_named += LoadLocal(
info.num_fixed_params);
3310 check_named.current = same;
3312 Fragment(different) +
Goto(
info.throw_no_such_method);
3314 if (
info.descriptor.NamedCount() > 0) {
3316 check_named += LoadLocal(
info.num_opt_params);
3318 TargetEntryInstr* valid;
3319 TargetEntryInstr* too_many;
3321 check_named.current = valid;
3323 Fragment(too_many) +
Goto(
info.throw_no_such_method);
3328 check_named += BuildClosureCallNamedArgumentsCheck(
info);
3329 check_named +=
Goto(join_after_optional);
3331 check_entry.current = join_after_optional;
3335Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
3336 const ClosureCallInfo&
info) {
3345 loop_init += LoadLocal(
info.type_parameters);
3347 TargetEntryInstr* null_bounds;
3348 TargetEntryInstr* non_null_bounds;
3349 loop_init +=
BranchIfNull(&null_bounds, &non_null_bounds);
3351 Fragment(null_bounds) +
Goto(
done);
3353 loop_init.current = non_null_bounds;
3357 loop_init +=
Drop();
3358 loop_init +=
Goto(loop);
3360 Fragment loop_check(loop);
3361 loop_check += LoadLocal(
info.vars->current_param_index);
3362 loop_check += LoadLocal(
info.num_type_parameters);
3364 TargetEntryInstr* more;
3365 TargetEntryInstr* no_more;
3370 Fragment loop_test_flag(more);
3373 loop_test_flag += LoadLocal(
info.type_parameter_flags);
3374 TargetEntryInstr* null_flags;
3375 TargetEntryInstr* non_null_flags;
3376 loop_test_flag +=
BranchIfNull(&null_flags, &non_null_flags);
3380 loop_test_flag.current = non_null_flags;
3381 loop_test_flag += LoadLocal(
info.type_parameter_flags);
3382 loop_test_flag += LoadLocal(
info.vars->current_param_index);
3386 kArrayCid, compiler::target::kCompressedWordSize);
3387 loop_test_flag += LoadLocal(
info.vars->current_param_index);
3394 TargetEntryInstr* is_noncovariant;
3395 TargetEntryInstr* is_covariant;
3396 loop_test_flag +=
BranchIfEqual(&is_noncovariant, &is_covariant);
3398 Fragment(is_covariant) +
Goto(
next);
3399 Fragment(is_noncovariant) +
Goto(
check);
3401 Fragment loop_prep_type_param(
check);
3406 loop_prep_type_param += LoadLocal(
info.vars->function_type_args);
3407 TargetEntryInstr* null_ftav;
3408 TargetEntryInstr* non_null_ftav;
3409 loop_prep_type_param +=
BranchIfNull(&null_ftav, &non_null_ftav);
3411 Fragment(null_ftav) +
Goto(dynamic_type_param);
3413 loop_prep_type_param.current = non_null_ftav;
3414 loop_prep_type_param += LoadLocal(
info.vars->function_type_args);
3415 loop_prep_type_param += LoadLocal(
info.vars->current_param_index);
3416 loop_prep_type_param += LoadLocal(
info.num_parent_type_args);
3417 loop_prep_type_param +=
SmiBinaryOp(Token::kADD,
true);
3419 kTypeArgumentsCid, compiler::target::kCompressedWordSize);
3420 loop_prep_type_param +=
StoreLocal(
info.vars->current_type_param);
3421 loop_prep_type_param +=
Drop();
3422 loop_prep_type_param +=
Goto(call);
3424 Fragment loop_dynamic_type_param(dynamic_type_param);
3427 loop_dynamic_type_param +=
StoreLocal(
info.vars->current_type_param);
3428 loop_dynamic_type_param +=
Drop();
3429 loop_dynamic_type_param +=
Goto(call);
3431 Fragment loop_call_check(call);
3433 loop_call_check += LoadLocal(
info.instantiator_type_args);
3434 loop_call_check += LoadLocal(
info.vars->function_type_args);
3436 loop_call_check += LoadLocal(
info.vars->current_type_param);
3438 loop_call_check += LoadLocal(
info.type_parameters);
3440 loop_call_check += LoadLocal(
info.vars->current_param_index);
3442 kTypeArgumentsCid, compiler::target::kCompressedWordSize);
3444 loop_call_check += LoadLocal(
info.type_parameters);
3446 loop_call_check += LoadLocal(
info.vars->current_param_index);
3448 kArrayCid, compiler::target::kCompressedWordSize);
3451 loop_call_check += AssertSubtype(TokenPosition::kNoSource);
3454 Fragment loop_incr(
next);
3455 loop_incr += LoadLocal(
info.vars->current_param_index);
3459 loop_incr +=
Drop();
3460 loop_incr +=
Goto(loop);
3462 return Fragment(loop_init.entry,
done);
3465Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
3466 const ClosureCallInfo&
info,
3467 LocalVariable* param_index,
3469 const String& arg_name) {
3470 Fragment instructions;
3475 instructions += LoadLocal(
info.parameter_types);
3476 instructions += LoadLocal(param_index);
3478 kArrayCid, compiler::target::kCompressedWordSize);
3480 instructions += LoadLocal(
info.instantiator_type_args);
3482 instructions += LoadLocal(
info.vars->function_type_args);
3485 AssertAssignableInstr::kParameterCheck);
3488 instructions +=
Drop();
3490 return instructions;
3493Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
3494 const ClosureCallInfo&
info) {
3495 Fragment instructions;
3499 for (intptr_t i = 1; i <
info.descriptor.PositionalCount(); i++) {
3504 instructions += BuildClosureCallArgumentTypeCheck(
3505 info, param_index, i, Symbols::dynamic_assert_assignable_stc_check());
3509 for (intptr_t i = 0; i <
info.descriptor.NamedCount(); i++) {
3510 const intptr_t arg_index =
info.descriptor.PositionAt(i);
3511 auto const param_index =
info.vars->named_argument_parameter_indices.At(i);
3514 instructions += BuildClosureCallArgumentTypeCheck(
3515 info, param_index, arg_index,
3516 Symbols::dynamic_assert_assignable_stc_check());
3519 return instructions;
3522Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
3523 LocalVariable* closure) {
3529 body += LoadLocal(
info.closure);
3534 body += LoadLocal(
info.signature);
3535 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3537 Slot::FunctionType_packed_parameter_counts());
3540 body += LoadLocal(
info.signature);
3541 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3543 Slot::FunctionType_packed_parameter_counts());
3546 body += LoadLocal(
info.num_fixed_params);
3547 body += LoadLocal(
info.num_opt_params);
3551 body += LoadLocal(
info.signature);
3552 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3554 Slot::FunctionType_packed_parameter_counts());
3560 body += LoadLocal(
info.signature);
3564 body += LoadLocal(
info.signature);
3568 body += LoadLocal(
info.signature);
3572 body += LoadLocal(
info.closure);
3576 body += LoadLocal(
info.closure);
3583 body += BuildClosureCallArgumentsValidCheck(
info);
3588 Fragment not_generic;
3589 not_generic += LoadLocal(
info.parent_function_type_args);
3591 not_generic +=
Drop();
3599 generic += LoadLocal(
info.signature);
3600 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3602 Slot::FunctionType_packed_type_parameter_counts());
3606 generic += LoadLocal(
info.signature);
3607 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3609 Slot::FunctionType_packed_type_parameter_counts());
3613 generic += LoadLocal(
info.type_parameters);
3619 generic += BuildClosureCallDefaultTypeHandling(
info);
3622 generic += LoadLocal(
info.vars->function_type_args);
3624 generic += LoadLocal(
info.parent_function_type_args);
3626 generic += LoadLocal(
info.num_parent_type_args);
3628 generic += LoadLocal(
info.num_parent_type_args);
3629 generic += LoadLocal(
info.num_type_parameters);
3633 generic += StaticCall(TokenPosition::kNoSource,
3634 PrependTypeArgumentsFunction(), 4, ICData::kStatic);
3643 if (FLAG_eliminate_type_checks) {
3655 body += TestClosureFunctionGeneric(
info, generic, not_generic);
3659 body += BuildClosureCallArgumentTypeChecks(
info);
3676FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
3686 const bool is_dynamic_call =
3688 if (is_dynamic_call) {
3696 const Class& closure_class =
3698 const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
3699 field_name.Equals(Symbols::call());
3707 PrologueInfo prologue_info(-1, -1);
3708 BlockEntryInstr* instruction_cursor =
3709 BuildPrologue(normal_entry, &prologue_info);
3711 Fragment body(instruction_cursor);
3712 body += CheckStackOverflowInPrologue(
function.token_pos());
3716 LocalVariable*
closure =
nullptr;
3717 if (is_closure_call) {
3719 if (is_dynamic_call) {
3724 "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
3726 body += BuildDynamicClosureCallChecks(closure);
3730 if (descriptor.TypeArgsLen() > 0) {
3732 ASSERT(type_args !=
nullptr);
3733 body += LoadLocal(type_args);
3736 if (is_closure_call) {
3738 body += LoadLocal(closure);
3742 const intptr_t kTypeArgsLen = 0;
3743 const intptr_t kNumArgsChecked = 1;
3745 kTypeArgsLen, 1, Array::null_array(), kNumArgsChecked);
3749 for (intptr_t
pos = 1;
pos < descriptor.Count();
pos++) {
3754 const Array* argument_names = &Object::null_array();
3755 if (descriptor.NamedCount() > 0) {
3756 const auto& array_handle =
3759 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
3760 const intptr_t named_arg_index =
3761 descriptor.PositionAt(i) - descriptor.PositionalCount();
3762 string_handle = descriptor.NameAt(i);
3763 array_handle.SetAt(named_arg_index, string_handle);
3765 argument_names = &array_handle;
3768 if (is_closure_call) {
3769 body += LoadLocal(closure);
3770 if (!FLAG_precompiled_mode) {
3774 body +=
ClosureCall(Function::null_function(), TokenPosition::kNoSource,
3775 descriptor.TypeArgsLen(), descriptor.Count(),
3778 const intptr_t kNumArgsChecked = 1;
3781 is_dynamic_call ? Symbols::DynamicCall() : Symbols::
call(),
3782 Token::kILLEGAL, descriptor.TypeArgsLen(),
3783 descriptor.Count(), *argument_names, kNumArgsChecked);
3786 body += Return(TokenPosition::kNoSource);
3793FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
3795 bool is_implicit_closure_function,
3796 bool throw_no_such_method_error) {
3803 PrologueInfo prologue_info(-1, -1);
3804 BlockEntryInstr* instruction_cursor =
3805 BuildPrologue(normal_entry, &prologue_info);
3807 Fragment body(instruction_cursor);
3808 body += CheckStackOverflowInPrologue(
function.token_pos());
3813 if (is_implicit_closure_function && !
function.is_static()) {
3823 kWordSize * compiler::target::frame_layout.param_end_from_fp);
3826 if (
function.NeedsTypeArgumentTypeChecks()) {
3831 if (
function.NeedsArgumentTypeChecks()) {
3832 BuildArgumentTypeChecks(&body, &body,
nullptr);
3841 body +=
StoreLocal(TokenPosition::kNoSource, argument_count_var);
3847 otherwise +=
StoreLocal(TokenPosition::kNoSource, argument_count_var);
3848 otherwise +=
Drop();
3852 if (
function.HasOptionalParameters()) {
3858 body += LoadLocal(argument_count_var);
3885 body +=
StoreLocal(TokenPosition::kNoSource, index);
3894 store += LoadLocal(arguments);
3896 store += LoadFunctionTypeArguments();
3904 TargetEntryInstr* body_entry;
3905 TargetEntryInstr* loop_exit;
3909 condition += LoadLocal(index);
3912 condition +=
BranchIfTrue(&body_entry, &loop_exit,
false);
3914 Fragment loop_body(body_entry);
3918 loop_body += LoadLocal(arguments);
3919 loop_body += LoadLocal(index);
3921 loop_body += LoadLocal(index);
3925 compiler::target::frame_layout.param_end_from_fp,
3930 loop_body += LoadLocal(index);
3933 loop_body +=
StoreLocal(TokenPosition::kNoSource, index);
3934 loop_body +=
Drop();
3937 loop_body +=
Goto(join);
3939 Fragment loop(join);
3942 Instruction* entry =
3944 body += Fragment(entry, loop_exit);
3948 if (is_implicit_closure_function) {
3949 if (throw_no_such_method_error) {
3950 const Function& parent =
3976 body += LoadLocal(arguments);
3978 if (throw_no_such_method_error) {
3979 const Function& parent =
3988 }
else if (
function.IsImplicitSetterFunction() ||
4000 if (
function.IsClosureFunction()) {
4004 then +=
StoreLocal(TokenPosition::kNoSource, argument_count_var);
4008 otherwise +=
StoreLocal(TokenPosition::kNoSource, argument_count_var);
4009 otherwise +=
Drop();
4011 body += LoadLocal(argument_count_var);
4016 const Class& mirror_class =
4018 ASSERT(!mirror_class.IsNull());
4019 const auto&
error = mirror_class.EnsureIsFinalized(
H.thread());
4023 Symbols::AllocateInvocationMirrorForClosure())));
4024 ASSERT(!allocation_function.IsNull());
4026 5, ICData::kStatic);
4028 if (throw_no_such_method_error) {
4032 const auto&
error = klass.EnsureIsFinalized(
H.thread());
4036 klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNewInvocation()));
4037 ASSERT(!throw_function.IsNull());
4038 body += StaticCall(TokenPosition::kNoSource, throw_function, 2,
4041 body += InstanceCall(
4042 TokenPosition::kNoSource, Symbols::NoSuchMethod(), Token::kILLEGAL,
4043 0, 2, Array::null_array(),
4053 if (!return_type.IsTopTypeForSubtyping()) {
4054 body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
4057 body += Return(TokenPosition::kNoSource);
4064Fragment FlowGraphBuilder::BuildDefaultTypeHandling(
const Function&
function) {
4065 Fragment keep_same, use_defaults;
4067 if (!
function.IsGeneric())
return keep_same;
4069 const auto& default_types =
4072 if (default_types.IsNull())
return keep_same;
4074 if (
function.IsClosureFunction()) {
4081 auto const mode =
function.default_type_arguments_instantiation_mode();
4085 use_defaults +=
Constant(default_types);
4088 use_defaults += LoadLocal(closure);
4093 use_defaults += LoadLocal(closure);
4101 use_defaults += LoadLocal(closure);
4107 if (!default_types.IsInstantiated(
kFunctions)) {
4108 use_defaults += LoadLocal(closure);
4118 use_defaults += TranslateInstantiatedTypeArguments(default_types);
4121 use_defaults +=
Drop();
4126FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
4127 Fragment shared_prologue_linked_in,
4128 Fragment skippable_checks,
4129 Fragment redefinitions_if_skipped,
4133 Instruction* prologue_start = shared_prologue_linked_in.entry->next();
4137 Fragment normal_entry(shared_prologue_linked_in.entry);
4140 normal_entry +=
StoreLocal(TokenPosition::kNoSource,
4142 normal_entry +=
Drop();
4143 normal_entry +=
Goto(join_entry);
4146 Fragment extra_entry(extra_target_entry);
4149 extra_entry +=
StoreLocal(TokenPosition::kNoSource,
4151 extra_entry +=
Drop();
4152 extra_entry +=
Goto(join_entry);
4154 if (prologue_start !=
nullptr) {
4155 join_entry->LinkTo(prologue_start);
4158 shared_prologue_linked_in.current = join_entry;
4161 TargetEntryInstr* do_checks;
4162 TargetEntryInstr* skip_checks;
4163 shared_prologue_linked_in +=
4166 shared_prologue_linked_in +=
4170 shared_prologue_linked_in +=
4175 Fragment(do_checks) + skippable_checks +
Goto(rest_entry);
4176 Fragment(skip_checks) + redefinitions_if_skipped +
Goto(rest_entry);
4177 Fragment(rest_entry) + body;
4179 return extra_target_entry;
4182FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
4183 BlockEntryInstr* normal_entry,
4184 Fragment normal_prologue,
4185 Fragment extra_prologue,
4186 Fragment shared_prologue,
4191 Fragment
normal(normal_entry);
4194 normal += normal_prologue;
4197 Fragment extra(extra_entry);
4201 extra += extra_prologue;
4202 extra +=
Goto(join_entry);
4204 Fragment(join_entry) + shared_prologue + body;
4208FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
4214 (parent.num_fixed_parameters() !=
target.num_fixed_parameters())) {
4215 return BuildGraphOfNoSuchMethodForwarder(
function,
true,
4216 parent.is_static());
4225 PrologueInfo prologue_info(-1, -1);
4226 BlockEntryInstr* instruction_cursor =
4227 BuildPrologue(normal_entry, &prologue_info);
4229 Fragment
closure(instruction_cursor);
4243 intptr_t type_args_len = 0;
4245 if (
target.IsConstructor()) {
4247 ASSERT(result_type.IsFinalized());
4254 Z, Type::Cast(result_type).GetInstanceTypeArguments(
H.thread()));
4256 TranslateInstantiatedTypeArguments(instantiated_type_arguments);
4258 type_args_len =
function.NumTypeParameters();
4262 }
else if (
target.IsFactory()) {
4269 if (
target.IsGenerativeConstructor()) {
4271 if (cls.NumTypeArguments() > 0) {
4274 Z, cls.GetDeclarationInstanceTypeArguments()));
4282 closure += LoadLocal(receiver);
4283 }
else if (!
target.is_static()) {
4294 target.NumImplicitParameters();
4297 Array& argument_names =
4301 argument_names, ICData::kNoRebind,
4302 nullptr, type_args_len);
4304 if (
target.IsGenerativeConstructor()) {
4318FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
4321 function.IsDynamicInvocationForwarder());
4332 const bool is_method = !
function.IsStaticFunction();
4333 const bool is_setter =
target.IsImplicitSetterFunction();
4334 const bool is_getter =
target.IsImplicitGetterFunction() ||
4335 target.IsImplicitStaticGetterFunction();
4336 ASSERT(is_setter || is_getter);
4346 Fragment body(normal_entry);
4348 auto const setter_value =
4353 body += LoadLocal(setter_value);
4359 const bool needs_type_check =
function.IsDynamicInvocationForwarder() ||
4360 setter_value->needs_type_check();
4361 if (needs_type_check) {
4362 body += CheckAssignable(setter_value->static_type(), setter_value->name(),
4363 AssertAssignableInstr::kParameterCheck,
4366 if (field.is_late()) {
4371 body += StoreLateField(
4387 field, field.NeedsInitializationCheckOnLoad());
4388 }
else if (field.is_const()) {
4390 if (
value.IsError()) {
4402 ASSERT(field.has_nontrivial_initializer() ||
4403 (field.is_late() && !field.has_initializer()));
4407 if (is_method || !field.is_const()) {
4414 Fragment load_guard = CheckAssignable(
4416 if (field.needs_load_guard()) {
4423 body += Return(TokenPosition::kNoSource);
4425 PrologueInfo prologue_info(-1, -1);
4431FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
4438 if (
target.IsImplicitSetterFunction() ||
target.IsImplicitGetterFunction()) {
4439 return BuildGraphOfFieldAccessor(
function);
4441 if (
target.IsMethodExtractor()) {
4442 return BuildGraphOfMethodExtractor(
target);
4445 return BuildGraphOfRecognizedMethod(
function);
4448 graph_entry_ =
new (
Z) GraphEntryInstr(*parsed_function_,
osr_id_);
4453 PrologueInfo prologue_info(-1, -1);
4454 auto instruction_cursor = BuildPrologue(normal_entry, &prologue_info);
4458 body += CheckStackOverflowInPrologue(
function.token_pos());
4469 body += BuildDefaultTypeHandling(
function);
4473 BuildTypeArgumentTypeChecks(
4475 BuildArgumentTypeChecks(&body, &body,
nullptr);
4479 intptr_t type_args_len = 0;
4481 type_args_len =
function.NumTypeParameters();
4492 const auto& argument_names =
4496 argument_names, ICData::kNoRebind,
nullptr, type_args_len);
4498 if (
target.has_unboxed_integer_return()) {
4499 body +=
Box(kUnboxedInt64);
4500 }
else if (
target.has_unboxed_double_return()) {
4501 body +=
Box(kUnboxedDouble);
4502 }
else if (
target.has_unboxed_record_return()) {
4511 if (
name.ptr() == Symbols::AssignIndexToken().ptr()) {
4516 body += Return(TokenPosition::kNoSource);
4518 instruction_cursor->LinkTo(body.entry);
4532void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
4533 const Fragment& fragment,
4536 ASSERT(fragment.current->IsDefinition());
4539 fragment.current->AsDefinition()->set_range(range);
4543 switch (unboxed_representation) {
4547 return kTypedDataFloat32ArrayCid;
4549 return kTypedDataInt32ArrayCid;
4550 case kUnboxedUint32:
4551 return kTypedDataUint32ArrayCid;
4553 return kTypedDataInt64ArrayCid;
4554 case kUnboxedDouble:
4555 return kTypedDataFloat64ArrayCid;
4562Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
4564 intptr_t index_scale,
4565 bool index_unboxed) {
4566 ASSERT(unboxed_representation == kUnboxedInt32 ||
4567 unboxed_representation == kUnboxedUint32 ||
4568 unboxed_representation == kUnboxedInt64 ||
4569 unboxed_representation == kUnboxedFloat ||
4570 unboxed_representation == kUnboxedDouble);
4572 if (unboxed_representation == kUnboxedFloat) {
4573 fragment += BitCast(kUnboxedFloat, kUnboxedInt32);
4574 unboxed_representation = kUnboxedInt32;
4577 index_scale, index_unboxed);
4581Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
4583 intptr_t index_scale,
4584 bool index_unboxed) {
4585 ASSERT(unboxed_representation == kUnboxedInt32 ||
4586 unboxed_representation == kUnboxedUint32 ||
4587 unboxed_representation == kUnboxedInt64 ||
4588 unboxed_representation == kUnboxedFloat ||
4589 unboxed_representation == kUnboxedDouble);
4591 if (unboxed_representation == kUnboxedFloat) {
4592 representation_for_load = kUnboxedInt32;
4596 index_scale, index_unboxed);
4597 if (unboxed_representation == kUnboxedFloat) {
4598 fragment += BitCast(kUnboxedInt32, kUnboxedFloat);
4603Fragment FlowGraphBuilder::UnhandledException() {
4605 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
4613 body += LoadLocal(error_instance);
4614 body += LoadLocal(CurrentException());
4619 body += LoadLocal(error_instance);
4620 body += LoadLocal(CurrentStackTrace());
4629 auto const unbox_to = to == kUnboxedFloat ? kUnboxedDouble : to;
4630 Fragment instructions;
4633 instructions <<= unbox;
4635 if (to == kUnboxedFloat) {
4638 return instructions;
4641Fragment FlowGraphBuilder::LoadThread() {
4642 LoadThreadInstr* instr =
new (
Z) LoadThreadInstr();
4644 return Fragment(instr);
4647Fragment FlowGraphBuilder::LoadIsolate() {
4649 body += LoadThread();
4654Fragment FlowGraphBuilder::LoadIsolateGroup() {
4656 body += LoadThread();
4661Fragment FlowGraphBuilder::LoadObjectStore() {
4663 body += LoadIsolateGroup();
4668Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
4670 body += LoadThread();
4676Fragment FlowGraphBuilder::BoolToInt() {
4682 Fragment instructions;
4683 TargetEntryInstr* is_true;
4684 TargetEntryInstr* is_false;
4690 Fragment store_1(is_true);
4692 store_1 +=
StoreLocal(TokenPosition::kNoSource, expression_temp);
4694 store_1 +=
Goto(join);
4698 Fragment store_0(is_false);
4700 store_0 +=
StoreLocal(TokenPosition::kNoSource, expression_temp);
4702 store_0 +=
Goto(join);
4705 instructions = Fragment(instructions.entry, join);
4706 instructions += LoadLocal(expression_temp);
4707 return instructions;
4710Fragment FlowGraphBuilder::IntToBool() {
4717Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
4722 RelationalOpInstr* instr =
new (
Z) RelationalOpInstr(
4723 InstructionSource(position), kind,
left,
right, kMintCid,
4726 return Fragment(instr);
4728 const String*
name =
nullptr;
4737 name = &Symbols::LessEqualOperator();
4740 name = &Symbols::GreaterEqualOperator();
4745 return InstanceCall(
4746 position, *
name, kind, 0, 2,
4747 Array::null_array(), 2);
4750Fragment FlowGraphBuilder::NativeReturn(
4751 const compiler::ffi::CallbackMarshaller& marshaller) {
4752 const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
4753 if (num_return_defs == 1) {
4754 auto* instr =
new (
Z) NativeReturnInstr(
Pop(), marshaller);
4755 return Fragment(instr).closed();
4759 auto* typed_data_base =
Pop();
4760 auto* instr =
new (
Z) NativeReturnInstr(typed_data_base,
offset, marshaller);
4761 return Fragment(instr).closed();
4765 BitCastInstr* instr =
new (
Z) BitCastInstr(from, to,
Pop());
4767 return Fragment(instr);
4770Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
4772 Call1ArgStubInstr* instr =
new (
Z) Call1ArgStubInstr(
4775 return Fragment(instr);
4778Fragment FlowGraphBuilder::Suspend(TokenPosition position,
4783 SuspendInstr* instr =
4784 new (
Z) SuspendInstr(InstructionSource(position), stub_id, operand,
4787 return Fragment(instr);
4790Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
4791 const AbstractType& compound_type) {
4792 const auto& compound_sub_class =
4794 compound_sub_class.EnsureIsFinalized(thread_);
4799 LocalVariable* typed_data =
MakeTemporary(
"typed_data_base");
4800 body +=
AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
4802 body += LoadLocal(compound);
4803 body += LoadLocal(typed_data);
4806 body += LoadLocal(compound);
4814Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {
4822Fragment FlowGraphBuilder::LoadOffsetInBytesFromCompound() {
4830Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
4831 ZoneGrowableArray<LocalVariable*>* definitions,
4832 const GrowableArray<Representation>& representations) {
4834 const intptr_t num_defs = representations.length();
4835 ASSERT(definitions->length() == num_defs);
4838 int offset_in_bytes = 0;
4839 for (intptr_t i = 0; i < num_defs; i++) {
4841 body += LoadLocal(uint8_list);
4843 body += LoadLocal(definitions->At(i));
4844 body += StoreIndexedTypedDataUnboxed(representation, 1,
4854 if (bytes_left >= 8 && compiler::target::kWordSize == 8) {
4857 if (bytes_left >= 4) {
4860 if (bytes_left >= 2) {
4869 return kTypedDataInt64ArrayCid;
4871 return kTypedDataInt32ArrayCid;
4873 return kTypedDataInt16ArrayCid;
4875 return kTypedDataInt8ArrayCid;
4886 return kExternalTypedDataInt64ArrayCid;
4888 return kExternalTypedDataInt32ArrayCid;
4890 return kExternalTypedDataInt16ArrayCid;
4892 return kExternalTypedDataInt8ArrayCid;
4899 intptr_t offset_in_bytes,
4902 if (size == 8 || size == 4) {
4903 body += LoadLocal(variable);
4904 body += LoadTypedDataBaseFromCompound();
4905 body += LoadLocal(variable);
4906 body += LoadOffsetInBytesFromCompound();
4909 body += LoadIndexedTypedDataUnboxed(representation, 1,
4913 ASSERT(representation != kUnboxedFloat);
4914 ASSERT(representation != kUnboxedDouble);
4916 intptr_t remaining = size;
4917 auto step = [&](intptr_t part_bytes, intptr_t part_cid) {
4918 while (remaining >= part_bytes) {
4919 body += LoadLocal(variable);
4920 body += LoadTypedDataBaseFromCompound();
4921 body += LoadLocal(variable);
4922 body += LoadOffsetInBytesFromCompound();
4936 offset_in_bytes += part_bytes;
4937 remaining -= part_bytes;
4941 step(8, kTypedDataUint64ArrayCid);
4942 step(4, kTypedDataUint32ArrayCid);
4943 step(2, kTypedDataUint16ArrayCid);
4944 step(1, kTypedDataUint8ArrayCid);
4950 if (from_representation != representation) {
4951 IntConverterInstr*
convert =
new IntConverterInstr(
4961Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
4962 LocalVariable* variable,
4963 const compiler::ffi::BaseMarshaller& marshaller,
4964 intptr_t arg_index) {
4966 const auto& native_loc = marshaller.Location(arg_index);
4967 if (native_loc.IsMultiple()) {
4968 const auto& multiple_loc = native_loc.AsMultiple();
4969 intptr_t offset_in_bytes = 0;
4970 for (intptr_t i = 0; i < multiple_loc.locations().
length(); i++) {
4971 const auto& loc = *multiple_loc.locations()[i];
4973 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
4976 representation = loc.container_type().AsRepresentationOverApprox(
Z);
4980 representation = loc.payload_type().AsRepresentationOverApprox(
Z);
4982 intptr_t
size = loc.payload_type().SizeInBytes();
4983 body += LoadTail(variable, size, offset_in_bytes, representation);
4984 offset_in_bytes +=
size;
4986 }
else if (native_loc.IsStack()) {
4990 intptr_t remaining = native_loc.payload_type().SizeInBytes();
4991 intptr_t offset_in_bytes = 0;
4992 while (remaining >= compiler::target::kWordSize) {
4993 body += LoadTail(variable, compiler::target::kWordSize, offset_in_bytes,
4995 offset_in_bytes += compiler::target::kWordSize;
4996 remaining -= compiler::target::kWordSize;
4998 if (remaining > 0) {
4999 body += LoadTail(variable, remaining, offset_in_bytes, representation);
5002 ASSERT(native_loc.IsPointerToMemory());
5004 body += LoadLocal(variable);
5005 body += LoadTypedDataBaseFromCompound();
5006 body += LoadLocal(variable);
5007 body += LoadOffsetInBytesFromCompound();
5013Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
5014 const compiler::ffi::BaseMarshaller& marshaller,
5015 intptr_t arg_index) {
5019 const auto& compound_type =
5021 body += WrapTypedDataBaseInCompound(compound_type);
5025Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
5026 const compiler::ffi::BaseMarshaller& marshaller,
5028 ZoneGrowableArray<LocalVariable*>* definitions) {
5029 const intptr_t length_in_bytes =
5030 marshaller.Location(arg_index).payload_type().SizeInBytes();
5033 if (marshaller.Location(arg_index).IsMultiple()) {
5039 const auto& multiple_loc = marshaller.Location(arg_index).AsMultiple();
5040 const intptr_t num_defs = multiple_loc.locations().length();
5041 intptr_t offset_in_bytes = 0;
5042 for (intptr_t i = 0; i < num_defs; i++) {
5043 const auto& loc = *multiple_loc.locations()[i];
5045 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
5048 representation = loc.container_type().AsRepresentationOverApprox(
Z);
5052 representation = loc.payload_type().AsRepresentationOverApprox(
Z);
5054 body += LoadLocal(uint8_list);
5056 body += LoadLocal(definitions->At(i));
5057 body += StoreIndexedTypedDataUnboxed(representation, 1,
5059 offset_in_bytes += loc.payload_type().SizeInBytes();
5063 }
else if (marshaller.Location(arg_index).IsStack()) {
5068 GrowableArray<Representation> representations;
5069 marshaller.RepsInFfiCall(arg_index, &representations);
5070 body += PopFromStackToTypedDataBase(definitions, representations);
5072 ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
5074 LocalVariable* address_of_compound =
MakeTemporary(
"address_of_compound");
5078 LocalVariable* typed_data_base =
MakeTemporary(
"typed_data_base");
5079 intptr_t offset_in_bytes = 0;
5080 while (offset_in_bytes < length_in_bytes) {
5081 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5082 const intptr_t chunk_sizee =
chunk_size(bytes_left);
5084 body += LoadLocal(address_of_compound);
5091 body += LoadLocal(typed_data_base);
5093 body += LoadLocal(chunk_value);
5099 offset_in_bytes += chunk_sizee;
5101 ASSERT(offset_in_bytes == length_in_bytes);
5105 const auto& compound_type =
5107 body += WrapTypedDataBaseInCompound(compound_type);
5111Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
5112 const compiler::ffi::CallbackMarshaller& marshaller,
5113 intptr_t arg_index) {
5115 const auto& native_loc = marshaller.Location(arg_index);
5116 if (native_loc.IsMultiple()) {
5120 body += LoadLocal(compound);
5121 body += LoadOffsetInBytesFromCompound();
5126 body += LoadTypedDataBaseFromCompound();
5129 ASSERT(native_loc.IsPointerToMemory());
5131 const intptr_t length_in_bytes =
5132 marshaller.Location(arg_index).payload_type().SizeInBytes();
5135 body += LoadLocal(compound);
5136 body += LoadTypedDataBaseFromCompound();
5137 LocalVariable* typed_data_base =
MakeTemporary(
"typed_data_base");
5138 body += LoadLocal(compound);
5139 body += LoadOffsetInBytesFromCompound();
5142 auto* pointer_to_return =
5144 Push(pointer_to_return);
5145 body <<= pointer_to_return;
5146 LocalVariable* unboxed_address =
MakeTemporary(
"unboxed_address");
5148 intptr_t offset_in_bytes = 0;
5149 while (offset_in_bytes < length_in_bytes) {
5150 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5151 const intptr_t chunk_sizee =
chunk_size(bytes_left);
5153 body += LoadLocal(typed_data_base);
5154 body += LoadLocal(
offset);
5161 body += LoadLocal(unboxed_address);
5163 body += LoadLocal(chunk_value);
5169 offset_in_bytes += chunk_sizee;
5172 ASSERT(offset_in_bytes == length_in_bytes);
5178Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
5179 const compiler::ffi::BaseMarshaller& marshaller,
5180 intptr_t arg_index) {
5181 ASSERT(!marshaller.IsCompoundCType(arg_index));
5184 if (marshaller.IsPointerPointer(arg_index)) {
5185 Class& result_class =
5188 result_class.EnsureIsFinalized(thread_);
5190 TypeArguments&
args =
5203 body +=
AllocateObject(TokenPosition::kNoSource, result_class, 1);
5205 body += LoadLocal(address);
5210 body += LoadLocal(
result);
5211 }
else if (marshaller.IsTypedDataPointer(arg_index)) {
5213 }
else if (marshaller.IsCompoundPointer(arg_index)) {
5215 }
else if (marshaller.IsHandleCType(arg_index)) {
5219 }
else if (marshaller.IsVoid(arg_index)) {
5225 if (marshaller.RequiresBitCast(arg_index)) {
5227 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)),
5228 marshaller.RepInDart(arg_index));
5231 body +=
Box(marshaller.RepInDart(arg_index));
5233 if (marshaller.IsBool(arg_index)) {
5234 body += IntToBool();
5240Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
5241 const compiler::ffi::BaseMarshaller& marshaller,
5243 LocalVariable* variable) {
5244 ASSERT(!marshaller.IsCompoundCType(arg_index));
5247 if (marshaller.IsPointerPointer(arg_index)) {
5251 }
else if (marshaller.IsTypedDataPointer(arg_index)) {
5253 }
else if (marshaller.IsCompoundPointer(arg_index)) {
5254 ASSERT(variable !=
nullptr);
5255 body += LoadTypedDataBaseFromCompound();
5256 body += LoadLocal(variable);
5257 body += LoadOffsetInBytesFromCompound();
5259 }
else if (marshaller.IsHandleCType(arg_index)) {
5266 auto*
const arg_reps =
5267 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5270 body += LoadThread();
5272 arg_reps->Add(kUntagged);
5276 CallLeafRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
5281 body += LoadLocal(handle);
5282 body += LoadLocal(
object);
5287 }
else if (marshaller.IsVoid(arg_index)) {
5293 if (marshaller.IsBool(arg_index)) {
5294 body += BoolToInt();
5297 body += UnboxTruncate(marshaller.RepInDart(arg_index));
5300 if (marshaller.RequiresBitCast(arg_index)) {
5302 marshaller.RepInDart(arg_index),
5303 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)));
5309FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
5311 switch (
function.GetFfiCallbackKind()) {
5314 return BuildGraphOfSyncFfiCallback(
function);
5316 return BuildGraphOfAsyncFfiCallback(
function);
5322Fragment FlowGraphBuilder::FfiNativeLookupAddress(
5326 .Equals(Symbols::FfiNative()));
5327 const auto& native_class_fields =
Array::Handle(
Z, native_class.fields());
5328 ASSERT(native_class_fields.Length() == 4);
5329 const auto& symbol_field =
5331 ASSERT(!symbol_field.is_static());
5332 const auto& asset_id_field =
5334 ASSERT(!asset_id_field.is_static());
5335 const auto& symbol =
5337 const auto& asset_id =
5340 ASSERT(type_args.Length() == 1);
5343 if (native_type.IsFunctionType()) {
5344 const auto& native_function_type = FunctionType::Cast(native_type);
5345 arg_n = native_function_type.NumParameters() -
5346 native_function_type.num_implicit_parameters();
5351 const auto& ffi_resolver =
5353#if !defined(TARGET_ARCH_IA32)
5360 CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
5362 Array::null_array(),
5368 char*
error =
nullptr;
5369#if !defined(DART_PRECOMPILER) || defined(TESTING)
5370 const uintptr_t function_address =
5373 const uintptr_t function_address = 0;
5376 if (
error ==
nullptr) {
5391 body += StaticCall(TokenPosition::kNoSource, ffi_resolver,
5392 3, ICData::kStatic);
5400Fragment FlowGraphBuilder::FfiNativeFunctionBody(
const Function&
function) {
5405 const auto& c_signature =
5407 auto const& native_instance =
5411 body += FfiNativeLookupAddress(native_instance);
5412 body += FfiCallFunctionBody(
function, c_signature,
5417Fragment FlowGraphBuilder::FfiCallFunctionBody(
5419 const FunctionType& c_signature,
5420 intptr_t first_argument_parameter_offset) {
5427 const char*
error =
nullptr;
5429 Z,
function, first_argument_parameter_offset, c_signature, &
error);
5434 const auto& marshaller = *marshaller_ptr;
5436 const bool signature_contains_handles = marshaller.ContainsHandles();
5444 const intptr_t num_args = marshaller.num_args();
5445 for (intptr_t i = 0; i < num_args; i++) {
5446 if (marshaller.IsHandleCType(i)) {
5450 first_argument_parameter_offset + i));
5455 Z,
function.ParameterNameAt(first_argument_parameter_offset + i)),
5459 first_argument_parameter_offset + i));
5463 intptr_t try_handler_index = -1;
5464 if (signature_contains_handles) {
5468 body += TryCatch(try_handler_index);
5475 auto*
const arg_reps =
5476 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5478 body += LoadThread();
5479 arg_reps->Add(kUntagged);
5481 body += CallLeafRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged,
5486 LocalVariable* return_compound_typed_data =
nullptr;
5487 if (marshaller.ReturnsCompound()) {
5488 body +=
IntConstant(marshaller.CompoundReturnSizeInBytes());
5495 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5496 if (marshaller.IsCompoundCType(i)) {
5497 body += FfiCallConvertCompoundArgumentToNative(
5503 first_argument_parameter_offset + i));
5507 if (!marshaller.IsHandleCType(i)) {
5508 body += FfiConvertPrimitiveToNative(
5511 first_argument_parameter_offset + i));
5516 body += LoadLocal(address);
5518 if (marshaller.ReturnsCompound()) {
5519 body += LoadLocal(return_compound_typed_data);
5522 body += FfiCall(marshaller,
function.FfiIsLeaf());
5524 const intptr_t num_defs = marshaller.NumReturnDefinitions();
5526 auto defs =
new (
Z) ZoneGrowableArray<LocalVariable*>(
Z, num_defs);
5530 if (marshaller.ReturnsCompound()) {
5536 body += FfiCallConvertCompoundReturnToDart(marshaller,
5542 auto exit_handle_scope = [&]() -> Fragment {
5544 auto*
const arg_reps =
5545 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5547 code += LoadThread();
5548 arg_reps->Add(kUntagged);
5550 code += CallLeafRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged,
5556 if (signature_contains_handles) {
5560 body += exit_handle_scope();
5564 body += Return(TokenPosition::kNoSource);
5566 if (signature_contains_handles) {
5569 Fragment catch_body =
5570 CatchBlockEntry(Array::empty_array(), try_handler_index,
5577 catch_body += exit_handle_scope();
5579 catch_body += LoadLocal(CurrentException());
5580 catch_body += LoadLocal(CurrentStackTrace());
5581 catch_body += RethrowException(TokenPosition::kNoSource, try_handler_index);
5588Fragment FlowGraphBuilder::LoadNativeArg(
5589 const compiler::ffi::CallbackMarshaller& marshaller,
5590 intptr_t arg_index) {
5591 const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
5592 auto defs =
new (
Z) ZoneGrowableArray<LocalVariable*>(
Z, num_defs);
5595 for (intptr_t j = 0; j < num_defs; j++) {
5596 const intptr_t def_index = marshaller.DefinitionIndex(j, arg_index);
5597 auto* parameter =
new (
Z) NativeParameterInstr(marshaller, def_index);
5599 fragment <<= parameter;
5604 if (marshaller.IsCompoundCType(arg_index)) {
5606 FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, defs);
5608 fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);
5613FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
5615 const char*
error =
nullptr;
5616 const auto marshaller_ptr =
5622 const auto& marshaller = *marshaller_ptr;
5623 const bool is_closure =
function.GetFfiCallbackKind() ==
5629 auto*
const native_entry =
5635 Fragment function_body(native_entry);
5636 function_body += CheckStackOverflowInPrologue(
function.token_pos());
5641 Fragment body = TryCatch(try_handler_index);
5644 LocalVariable*
closure =
nullptr;
5647 body += LoadThread();
5649 LoadUntagged(compiler::target::Thread::unboxed_runtime_arg_offset());
5655 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5656 body += LoadNativeArg(marshaller, i);
5663 body += LoadLocal(closure);
5664 if (!FLAG_precompiled_mode) {
5671 ClosureCall(Function::null_function(), TokenPosition::kNoSource,
5678 body += StaticCall(TokenPosition::kNoSource,
5680 marshaller.num_args(), Array::empty_array(),
5691 body += FfiCallbackConvertCompoundReturnToNative(
5698 body += NativeReturn(marshaller);
5701 function_body += body;
5704 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5719 catch_body += UnhandledException();
5725 const intptr_t
size =
5729 compiler::target::kWordSize);
5733 catch_body += WrapTypedDataBaseInCompound(
5735 catch_body += FfiCallbackConvertCompoundReturnToNative(
5745 catch_body += NativeReturn(marshaller);
5748 PrologueInfo prologue_info(-1, -1);
5754FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
5756 const char*
error =
nullptr;
5757 const auto marshaller_ptr =
5763 const auto& marshaller = *marshaller_ptr;
5772 auto*
const native_entry =
5778 Fragment function_body(native_entry);
5779 function_body += CheckStackOverflowInPrologue(
function.token_pos());
5784 Fragment body = TryCatch(try_handler_index);
5792 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5793 body += LoadLocal(array);
5795 body += LoadNativeArg(marshaller, i);
5800 body += Call1ArgStub(TokenPosition::kNoSource,
5805 body += NativeReturn(marshaller);
5808 function_body += body;
5811 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5821 catch_body += NativeReturn(marshaller);
5824 PrologueInfo prologue_info(-1, -1);
5830void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
5831 try_catch_block_ = try_catch_block;
5833 : try_catch_block->try_index());
5836const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
5837 if (prepend_type_arguments_.
IsNull()) {
5839 prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
5840 Symbols::PrependTypeArguments());
5843 return prepend_type_arguments_;
5846Fragment FlowGraphBuilder::BuildIntegerHashCode(
bool smi) {
5849 HashIntegerOpInstr*
hash =
5856Fragment FlowGraphBuilder::BuildDoubleHashCode() {
5862 body +=
Box(kUnboxedInt64);
5871 intptr_t case_count)
5873 position_(position),
5874 is_exhaustive_(is_exhaustive),
5875 expression_type_(expression_type),
5876 switch_block_(switch_block),
5877 case_count_(case_count),
5878 case_bodies_(case_count),
5879 case_expression_counts_(case_count),
5880 expressions_(case_count),
5881 sorted_expressions_(case_count) {
5886 is_optimizable_ =
true;
5890 is_optimizable_ =
true;
5891 is_enum_switch_ =
true;
5900 const uint64_t diff =
static_cast<uint64_t
>(
max) -
static_cast<uint64_t
>(
min);
5902 if (diff >
static_cast<uint64_t
>(
kMaxInt64 - 1)) {
5905 return static_cast<int64_t
>(diff + 1);
5930 const intptr_t kJumpTableMinExpressions = 16;
5933 const intptr_t kJumpTableMaxSize =
kMaxInt32;
5937 const double kJumpTableMaxHolesRatio = 1.0;
5954 PrepareForOptimizedSwitch();
5967 if (range > kJumpTableMaxSize) {
5971 const intptr_t num_expressions =
expressions().length();
5972 ASSERT(num_expressions <= range);
5974 const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
5975 const int64_t holes = range - num_expressions;
5978 if (num_expressions < kJumpTableMinExpressions) {
5982 if (holes > max_holes) {
6012 kJumpTableMaxSize - range);
6015 if (required_holes <= holes_budget) {
6016 expression_min_ = &Object::smi_zero();
6023void SwitchHelper::PrepareForOptimizedSwitch() {
6026 const Field* enum_index_field =
nullptr;
6027 for (intptr_t i = 0; i < expressions_.length(); ++i) {
6029 sorted_expressions_.Add(&expression);
6032 const Integer* integer =
nullptr;
6034 if (enum_index_field ==
nullptr) {
6041 integer = &Integer::Cast(value);
6045 expression_min_ = integer;
6046 expression_max_ = integer;
6049 expression_min_ = integer;
6052 expression_max_ = integer;
6058 sorted_expressions_.Sort(
6059 [](SwitchExpression*
const*
a, SwitchExpression*
const*
b) {
6060 return (*a)->integer().CompareWith((*b)->integer());
6066 for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
6067 const SwitchExpression&
a = *sorted_expressions_.At(i);
6068 const SwitchExpression&
b = *sorted_expressions_.At(i + 1);
6069 if (
a.integer().Equals(
b.integer())) {
6070 is_optimizable_ =
false;
6079 case_expression_counts_[case_index]++;
6083 if (is_optimizable_) {
6085 if (!
value.IsInstanceOf(expression_type_, Object::null_type_arguments(),
6086 Object::null_type_arguments())) {
6087 is_optimizable_ =
false;
static int step(int x, SkScalar min, SkScalar max)
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
static bool match(const char *needle, const char *haystack)
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
static bool equal(const SkBitmap &a, const SkBitmap &b)
static void is_empty(skiatest::Reporter *reporter, const SkPath &p)
static float next(float f)
#define check(reporter, ref, unref, make, kill)
void check_bounds(skiatest::Reporter *reporter, const SkPath &path)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define ASSERT_NOTNULL(ptr)
#define CLASS_LIST_TYPED_DATA(V)
#define DART_CLASS_LIST_TYPED_DATA(V)
virtual bool HasTypeClass() const
Nullability nullability() const
virtual ClassPtr type_class() const
static ArrayPtr NewBoxed(intptr_t type_args_len, intptr_t num_arguments, const Array &optional_arguments_names, Heap::Space space=Heap::kOld)
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
void FillWith(const T &value, intptr_t start, intptr_t length)
static const Bool & False()
static const Bool & True()
@ kDeeplyImmutableAttachNativeFinalizer
static AbstractTypePtr FinalizeType(const AbstractType &type, FinalizationKind finalization=kCanonicalize)
UntaggedClosureData::PackedInstantiationMode PackedInstantiationMode
static CompileType Dynamic()
intptr_t GetNextDeoptId()
const Function & TypedListGetFloat32()
static CompilerState & Current()
const Function & TypedListSetFloat32()
static constexpr intptr_t kNoOSRDeoptId
static constexpr intptr_t kNone
static intptr_t InputCountForMarshaller(const compiler::ffi::CallMarshaller &marshaller)
static StringPtr GetterSymbol(const String &field_name)
static bool IsGetterName(const String &function_name)
static StringPtr NameFromGetter(const String &getter_name)
static bool SupportsUnboxedDoubles()
static bool SupportsUnboxedSimd128()
static constexpr CompilationMode CompilationModeFrom(bool is_optimizing)
UntaggedFunctionType::PackedNumOptionalParameters PackedNumOptionalParameters
UntaggedFunctionType::PackedNumFixedParameters PackedNumFixedParameters
UntaggedFunctionType::PackedHasNamedOptionalParameters PackedHasNamedOptionalParameters
static bool IsDynamicInvocationForwarderName(const String &name)
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
bool IsClosureFunction() const
KernelProgramInfoPtr KernelProgramInfo() const
intptr_t NumParameters() const
const char * ToLibNamePrefixedQualifiedCString() const
static bool UseUnboxedRepresentation()
void RelinkToOsrEntry(Zone *zone, intptr_t max_block_id)
void AddCatchEntry(CatchBlockEntryInstr *entry)
FunctionEntryInstr * normal_entry() const
void set_normal_entry(FunctionEntryInstr *entry)
ObjectPtr GetField(const Field &field) const
static intptr_t ElementSizeFor(intptr_t cid)
virtual TypeArgumentsPtr GetTypeArguments() const
virtual Representation representation() const
virtual int CompareWith(const Integer &other) const
virtual int64_t AsInt64Value() const
static int EncodeType(Level level, Kind kind)
ClassTable * class_table() const
static LeafRuntimeCallInstr * Make(Zone *zone, Representation return_representation, const ZoneGrowableArray< Representation > &argument_representations, InputsArray &&inputs)
static ClassPtr LookupCoreClass(const String &class_name)
static const String & PrivateCoreLibName(const String &member)
static LibraryPtr InternalLibrary()
static Representation ReturnRepresentation(intptr_t array_cid)
int num_context_variables() const
const AbstractType & static_type() const
const String & name() const
static bool IsMarkedAsRecognized(const Function &function, const char *kind=nullptr)
static intptr_t MethodKindToReceiverCid(Kind kind)
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
virtual const char * ToCString() const
static ObjectPtr RawCast(ObjectPtr obj)
static Object & ZoneHandle()
bool has_entry_points_temp_var() const
LocalVariable * expression_temp_var() const
const Function & function() const
LocalVariable * RawTypeArgumentsVariable() const
LocalScope * scope() const
const Function * forwarding_stub_super_target() const
bool has_receiver_var() const
LocalVariable * entry_points_temp_var() const
bool has_arg_desc_var() const
bool is_forwarding_stub() const
DynamicClosureCallVars * dynamic_closure_call_vars() const
LocalVariable * ParameterVariable(intptr_t i) const
LocalVariable * current_context_var() const
LocalVariable * RawParameterVariable(intptr_t i) const
LocalVariable * receiver_var() const
LocalVariable * function_type_arguments() const
static RangeBoundary FromConstant(int64_t val)
static intptr_t GetPositionalFieldIndexFromFieldName(const String &field_name)
static DART_NORETURN void LongJump(const Error &error)
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add=true)
static const Slot & GetContextVariableSlotFor(Thread *thread, const LocalVariable &var)
static const Slot & GetRecordFieldSlot(Thread *thread, intptr_t offset_in_bytes)
static const Slot & GetLengthFieldForArrayCid(intptr_t array_cid)
static const Slot & GetTypeArgumentsSlotFor(Thread *thread, const Class &cls)
static SmiPtr New(intptr_t value)
static Representation ValueRepresentation(intptr_t array_cid)
static const String & LAngleBracket()
static const String & RAngleBracket()
static StringPtr FromConcatAll(Thread *thread, const GrowableHandlePtrArray< const String > &strs)
static const String & Empty()
static StringPtr New(Thread *thread, const char *cstr)
static const String & Dot()
static bool double_truncate_round_supported()
static Thread * Current()
CompilerState & compiler_state()
IsolateGroup * isolate_group() const
static TokenPosition Synthetic(intptr_t value)
static const TokenPosition kMinSource
static constexpr intptr_t kFlagsPerSmiShift
static constexpr intptr_t kFlagsPerSmiMask
static TypePtr New(const Class &clazz, const TypeArguments &arguments, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
static TypePtr DynamicType()
static UnboxInstr * Create(Representation to, Value *value, intptr_t deopt_id, SpeculativeMode speculative_mode=kGuardInputs)
BitField< decltype(packed_type_parameter_counts_), uint8_t, PackedNumParentTypeArguments::kNextBit, 8 > PackedNumTypeParameters
BitField< decltype(packed_type_parameter_counts_), uint8_t, 0, 8 > PackedNumParentTypeArguments
static constexpr int ShiftForPowerOfTwo(T x)
static T Minimum(T x, T y)
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
static CallMarshaller * FromFunction(Zone *zone, const Function &function, intptr_t function_params_start_at, const FunctionType &c_signature, const char **error)
static CallbackMarshaller * FromFunction(Zone *zone, const Function &function, const char **error)
intptr_t ClassNumTypeArguments()
Fragment IntConstant(int64_t value)
Fragment SmiRelationalOp(Token::Kind kind)
Fragment TestDelayedTypeArgs(LocalVariable *closure, Fragment present, Fragment absent)
Fragment LoadLocal(LocalVariable *variable)
Fragment StoreNativeField(TokenPosition position, const Slot &slot, InnerPointerAccess stores_inner_pointer, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier, compiler::Assembler::MemoryOrder memory_order=compiler::Assembler::kRelaxedNonAtomic)
Fragment ThrowException(TokenPosition position)
Fragment GenericCheckBound()
Definition * Peek(intptr_t depth=0)
Fragment TestAnyTypeArgs(Fragment present, Fragment absent)
Fragment ConvertUnboxedToUntagged()
Fragment LoadContextAt(int depth)
Fragment DebugStepCheck(TokenPosition position)
const Array & saved_args_desc_array()
Fragment CalculateElementAddress(intptr_t index_scale)
intptr_t last_used_block_id_
JoinEntryInstr * BuildThrowNoSuchMethod()
InputsArray GetArguments(int count)
Fragment LoadFpRelativeSlot(intptr_t offset, CompileType result_type, Representation representation=kTagged)
Fragment InvokeMathCFunction(MethodRecognizer::Kind recognized_kind, intptr_t num_inputs)
Fragment LoadArgDescriptor()
Fragment StoreField(const Field &field, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier)
JoinEntryInstr * BuildJoinEntry()
Fragment CheckNotDeeplyImmutable(CheckWritableInstr::Kind kind)
Fragment AllocateTypedData(TokenPosition position, classid_t class_id)
Fragment StoreFpRelativeSlot(intptr_t offset)
Fragment MemoryCopy(classid_t src_cid, classid_t dest_cid, bool unboxed_inputs, bool can_overlap=true)
Fragment InstantiateTypeArguments(const TypeArguments &type_arguments)
intptr_t AllocateBlockId()
Fragment StoreStaticField(TokenPosition position, const Field &field)
void InlineBailout(const char *reason)
Fragment StoreIndexedTypedData(classid_t class_id, intptr_t index_scale, bool index_unboxed, AlignmentType alignment=kAlignedAccess)
void FinalizeCoverageArray()
Fragment LoadUntagged(intptr_t offset)
Fragment TailCall(const Code &code)
Fragment AssertBool(TokenPosition position)
Fragment InstantiateDynamicTypeArguments()
Fragment BuildEntryPointsIntrospection()
Fragment AssertAssignable(TokenPosition position, const String &dst_name, AssertAssignableInstr::Kind kind=AssertAssignableInstr::kUnknown)
Fragment StoreLocal(LocalVariable *variable)
Fragment LoadField(const Field &field, bool calls_initializer)
Fragment DropTempsPreserveTop(intptr_t num_temps_to_drop)
void SetCurrentTryIndex(intptr_t try_index)
Fragment ClosureCall(const Function &target_function, TokenPosition position, intptr_t type_args_len, intptr_t argument_count, const Array &argument_names, const InferredTypeMetadata *result_type=nullptr)
FunctionEntryInstr * BuildFunctionEntry(GraphEntryInstr *graph_entry)
intptr_t AllocateTryIndex()
Fragment LoadNativeField(const Slot &native_field, InnerPointerAccess loads_inner_pointer, bool calls_initializer=false)
Fragment StoreFieldGuarded(const Field &field, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther)
Fragment LoadStaticField(const Field &field, bool calls_initializer)
Fragment BranchIfTrue(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
Fragment BranchIfEqual(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
Fragment UnboxedIntConstant(int64_t value, Representation representation)
Fragment RedefinitionWithType(const AbstractType &type)
Fragment LoadIndexed(classid_t class_id, intptr_t index_scale=compiler::target::kWordSize, bool index_unboxed=false, AlignmentType alignment=kAlignedAccess)
Fragment Return(TokenPosition position)
LocalVariable * MakeTemporary(const char *suffix=nullptr)
Fragment BinaryIntegerOp(Token::Kind op, Representation representation, bool is_truncating=false)
intptr_t GetNextDeoptId()
Fragment AllocateClosure(TokenPosition position, bool has_instantiator_type_args, bool is_generic, bool is_tear_off)
Fragment UnaryDoubleOp(Token::Kind op)
const Array & coverage_array() const
Fragment StrictCompare(TokenPosition position, Token::Kind kind, bool number_check=false)
Fragment AllocateObject(TokenPosition position, const Class &klass, intptr_t argument_count)
Fragment Constant(const Object &value)
Fragment StoreIndexed(classid_t class_id)
Fragment CheckNullOptimized(const String &name, CheckNullInstr::ExceptionType exception_type, TokenPosition position=TokenPosition::kNoSource)
void Push(Definition *definition)
Fragment SmiBinaryOp(Token::Kind op, bool is_truncating=false)
intptr_t CurrentTryIndex() const
Fragment DoubleToInteger(MethodRecognizer::Kind recognized_kind)
Fragment BranchIfNull(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
Fragment ConvertUntaggedToUnboxed()
Fragment DropTemporary(LocalVariable **temp)
Fragment CheckStackOverflowInPrologue(TokenPosition position)
Fragment Goto(JoinEntryInstr *destination)
Fragment AllocateContext(const ZoneGrowableArray< const Slot * > &scope)
Fragment BranchIfStrictEqual(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry)
virtual ~FlowGraphBuilder()
FlowGraphBuilder(ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, ZoneGrowableArray< intptr_t > *context_level_array, InlineExitCollector *exit_collector, bool optimizing, intptr_t osr_id, intptr_t first_block_id=1, bool inlining_unchecked_entry=false)
static bool IsExpressionTempVarUsedInRecognizedMethodFlowGraph(const Function &function)
static bool IsRecognizedMethodForFlowGraph(const Function &function)
void Prepend(Instruction *start)
LocalVariable * type_arguments_variable
IntMap< LocalScope * > scopes
IntMap< LocalVariable * > locals
intptr_t num_ast_nodes() const
const Instance & value() const
void set_integer(const Integer &integer)
const Integer & expression_max() const
const AbstractType & expression_type() const
bool is_optimizable() const
bool is_enum_switch() const
SwitchHelper(Zone *zone, TokenPosition position, bool is_exhaustive, const AbstractType &expression_type, SwitchBlock *switch_block, intptr_t case_count)
void AddExpression(intptr_t case_index, TokenPosition position, const Instance &value)
const GrowableArray< SwitchExpression > & expressions() const
int64_t ExpressionRange() const
intptr_t case_count() const
bool RequiresUpperBoundCheck() const
SwitchDispatch SelectDispatchStrategy()
bool is_exhaustive() const
const Integer & expression_min() const
const TokenPosition & position() const
bool RequiresLowerBoundCheck() const
static Editor::Movement convert(skui::Key key)
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
const uint8_t uint32_t uint32_t GError ** error
#define DEFINE_FLAG(type, name, default_value, comment)
Dart_NativeFunction function
static float max(float r, float g, float b)
static float min(float r, float g, float b)
#define CASE(Arity, Mask, Name, Args, Result)
#define LOAD_NATIVE_FIELD(V)
#define STORE_NATIVE_FIELD_NO_BARRIER(V)
#define STORE_NATIVE_FIELD(V)
#define IL_BODY(method, slot)
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
AlignmentType RecognizedMethodAlignment(MethodRecognizer::Kind kind)
classid_t RecognizedMethodTypeArgCid(MethodRecognizer::Kind kind)
const intptr_t kResultIndex
classid_t ElementExternalTypedDataCid(classid_t class_id)
classid_t ElementTypedDataCid(classid_t class_id)
const Class & GrowableObjectArrayClass()
@ kCheckCovariantTypeParameterBounds
@ kCheckNonCovariantTypeParameterBounds
@ kCheckAllTypeParameterBounds
static const Function & TypedListSetNativeFunction(Thread *thread, classid_t cid)
static classid_t TypedDataCidUnboxed(Representation unboxed_representation)
@ kSwitchDispatchLinearScan
@ kSwitchDispatchJumpTable
@ kSwitchDispatchBinarySearch
static classid_t external_typed_data_cid(intptr_t chunk_size)
static classid_t typed_data_cid(intptr_t chunk_size)
const Function & TypedListGetNativeFunction(Thread *thread, classid_t cid)
static intptr_t chunk_size(intptr_t bytes_left)
static bool CanUnboxElements(classid_t cid)
constexpr int64_t kMaxInt64
static const char *const names[]
@ kSharesInstantiatorTypeArguments
@ kSharesFunctionTypeArguments
bool IsTypedDataBaseClassId(intptr_t index)
static constexpr Representation kUnboxedUword
@ kIsolateLocalClosureCallback
@ kIsolateLocalStaticCallback
@ kUnmodifiableByteDataViewCid
constexpr intptr_t kBitsPerByte
GrowableArray< Value * > InputsArray
bool IsZero(char *begin, char *end)
static constexpr Representation kUnboxedAddress
constexpr int32_t kMaxInt32
intptr_t FfiResolveInternal(const String &asset, const String &symbol, uintptr_t args_n, char **error)
constexpr intptr_t kWordSize
static constexpr Representation kUnboxedIntPtr
static constexpr Representation kUnboxedWord
static constexpr intptr_t kInvalidTryIndex
bool IsExternalTypedDataClassId(intptr_t index)
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
std::function< void()> closure
SINT Vec< 2 *N, T > join(const Vec< N, T > &lo, const Vec< N, T > &hi)
static constexpr size_t ValueSize(Representation rep)
static Representation RepresentationOfArrayElement(classid_t cid)
LocalVariable * num_fixed_params
LocalVariable * type_parameter_flags
LocalVariable * num_max_params
LocalVariable * type_parameters
ParsedFunction::DynamicClosureCallVars *const vars
LocalVariable * parent_function_type_args
LocalVariable * num_opt_params
ClosureCallInfo(LocalVariable *closure, JoinEntryInstr *throw_no_such_method, const Array &arguments_descriptor_array, ParsedFunction::DynamicClosureCallVars *const vars)
LocalVariable * instantiator_type_args
const ArgumentsDescriptor descriptor
LocalVariable *const closure
LocalVariable * num_type_parameters
JoinEntryInstr *const throw_no_such_method
LocalVariable * signature
LocalVariable * parameter_types
LocalVariable * num_parent_type_args
LocalVariable * has_named_params
LocalVariable * named_parameter_names