48 "Print huge methods (less optimized)");
51 force_switch_dispatch_type,
53 "Force switch statements to use a particular dispatch type: "
54 "-1=auto, 0=linear scan, 1=binary search, 2=jump table");
59#define H (translation_helper_)
60#define T (type_translator_)
61#define I Isolate::Current()
62#define IG IsolateGroup::Current()
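// Shorthand macros used throughout this file: H is the translation helper,
// T the type translator, I the current Isolate, and IG the current
// IsolateGroup.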
71 intptr_t first_block_id,
72 bool inlining_unchecked_entry)
78 inlining_unchecked_entry),
79 translation_helper_(Thread::Current()),
80 thread_(translation_helper_.thread()),
81 zone_(translation_helper_.zone()),
82 parsed_function_(parsed_function),
83 optimizing_(optimizing),
84 ic_data_array_(*ic_data_array),
89 block_expression_depth_(0),
90 graph_entry_(nullptr),
92 breakable_block_(nullptr),
93 switch_block_(nullptr),
94 try_catch_block_(nullptr),
95 try_finally_block_(nullptr),
96 catch_block_(nullptr),
97 prepend_type_arguments_(Function::ZoneHandle(zone_)) {
100 H.InitFromKernelProgramInfo(info);
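// EnterScope/ExitScope bracket a kernel scope: when the scope has captured
// (context-allocated) variables, a new Context is pushed on entry and popped
// again on exit.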
105Fragment FlowGraphBuilder::EnterScope(
106 intptr_t kernel_offset,
111 instructions += PushContext(scope);
112 instructions += Drop();
114 if (context_scope != nullptr) {
115 *context_scope = scope;
120Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
121 Fragment instructions;
122 const intptr_t context_size =
123 scopes_->scopes.Lookup(kernel_offset)->num_context_variables();
124 if (context_size > 0) {
125 instructions += PopContext();
130Fragment FlowGraphBuilder::AdjustContextTo(int depth) {
131 ASSERT(depth <= context_depth_ && depth >= 0);
132 Fragment instructions;
135 instructions += StoreLocal(TokenPosition::kNoSource,
137 instructions += Drop();
143Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
144 ASSERT(scope->num_context_variables() > 0);
147 instructions += LoadLocal(context);
151 instructions += StoreLocal(TokenPosition::kNoSource,
157Fragment FlowGraphBuilder::PopContext() {
161Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
163 Fragment instructions;
168 while (function.IsClosureFunction()) {
177 instructions += LoadLocal(parsed_function_->receiver_var());
189Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
190 Fragment instructions;
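// Emits IL that materializes the given type arguments: already-instantiated
// type arguments become a Constant, otherwise the instantiator and/or
// function type argument vectors are loaded so instantiation can happen at
// run time.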
204Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
205 const TypeArguments& type_arguments) {
206 Fragment instructions;
208 auto const mode = type_arguments.GetInstantiationMode(
214 instructions += Constant(type_arguments);
226 instructions += LoadInstantiatorTypeArguments();
229 instructions += LoadFunctionTypeArguments();
235 instructions += LoadInstantiatorTypeArguments();
239 if (!type_arguments.IsInstantiated(kFunctions)) {
240 instructions += LoadFunctionTypeArguments();
250Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
251 intptr_t handler_index,
252 bool needs_stacktrace,
253 bool is_synthesized) {
254 LocalVariable* exception_var = CurrentException();
255 LocalVariable* stacktrace_var = CurrentStackTrace();
256 LocalVariable* raw_exception_var = CurrentRawException();
257 LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();
259 CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
263 stacktrace_var, raw_exception_var, raw_stacktrace_var);
266 Fragment instructions(entry);
271 const bool should_restore_closure_context =
274 if (should_restore_closure_context) {
278 ASSERT(!closure_parameter->is_captured());
279 instructions += LoadLocal(closure_parameter);
281 instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
282 instructions += Drop();
285 if (exception_var->is_captured()) {
286 instructions += LoadLocal(context_variable);
287 instructions += LoadLocal(raw_exception_var);
291 if (stacktrace_var->is_captured()) {
292 instructions += LoadLocal(context_variable);
293 instructions += LoadLocal(raw_stacktrace_var);
304 ASSERT(!CurrentCatchContext()->is_captured() ||
305 CurrentCatchContext()->owner()->context_level() == 0);
307 instructions += LoadLocal(CurrentCatchContext());
308 instructions += StoreLocal(TokenPosition::kNoSource,
310 instructions += Drop();
316Fragment FlowGraphBuilder::TryCatch(int try_handler_index) {
325 body += StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
328 return Fragment(body.entry, entry);
331Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
332 TokenPosition position) {
337Fragment FlowGraphBuilder::CloneContext(
338 const ZoneGrowableArray<const Slot*>& context_slots) {
341 Fragment instructions = LoadLocal(context_variable);
343 CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
345 instructions <<= clone_instruction;
346 Push(clone_instruction);
348 instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
349 instructions += Drop();
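// Builds an InstanceCallInstr for a dynamic call site, attaching inferred
// result-type metadata, the unchecked entry kind, and any static receiver
// type information recorded for the call site.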
353Fragment FlowGraphBuilder::InstanceCall(
354 TokenPosition position,
357 intptr_t type_args_len,
359 const Array& argument_names,
360 intptr_t checked_argument_count,
361 const Function& interface_target,
362 const Function& tearoff_interface_target,
363 const InferredTypeMetadata* result_type,
364 bool use_unchecked_entry,
365 const CallSiteAttributesMetadata* call_site_attrs,
366 bool receiver_is_not_smi,
367 bool is_call_on_this) {
369 const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
371 InstanceCallInstr* call = new (Z) InstanceCallInstr(
372 InstructionSource(position), name, kind, std::move(arguments),
373 type_args_len, argument_names, checked_argument_count, ic_data_array_,
375 if ((result_type != nullptr) && !result_type->IsTrivial()) {
376 call->SetResultType(Z, result_type->ToCompileType(Z));
378 if (use_unchecked_entry) {
379 call->set_entry_kind(Code::EntryKind::kUnchecked);
381 if (is_call_on_this) {
382 call->mark_as_call_on_this();
384 if (call_site_attrs != nullptr && call_site_attrs->receiver_type != nullptr &&
385 call_site_attrs->receiver_type->IsInstantiated()) {
386 call->set_receivers_static_type(call_site_attrs->receiver_type);
387 } else if (!interface_target.IsNull()) {
389 const AbstractType& type =
391 call->set_receivers_static_type(&type);
393 call->set_receiver_is_not_smi(receiver_is_not_smi);
395 instructions <<= call;
396 if (result_type != nullptr && result_type->IsConstant()) {
397 instructions += Drop();
398 instructions += Constant(result_type->constant_value);
403Fragment FlowGraphBuilder::FfiCall(
404 const compiler::ffi::CallMarshaller& marshaller,
408 const intptr_t num_arguments =
411 FfiCallInstr* const call = new (Z)
412 FfiCallInstr(GetNextDeoptId(), marshaller, is_leaf, std::move(arguments));
419Fragment FlowGraphBuilder::CallLeafRuntimeEntry(
420 const RuntimeEntry& entry,
422 const ZoneGrowableArray<Representation>& argument_representations) {
425 body += LoadThread();
428 const intptr_t num_arguments = argument_representations.length() + 1;
431 Z, return_representation, argument_representations, std::move(arguments));
438Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
439 int catch_try_index) {
440 Fragment instructions;
443 instructions += Fragment(new (Z) ReThrowInstr(
444 InstructionSource(position), catch_try_index,
454Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
457 const ParsedFunction* pf = parsed_function_;
458 if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
459 variable == pf->receiver_var()) {
460 ASSERT(variable == pf->ParameterVariable(0));
461 variable = pf->RawParameterVariable(0);
463 if (variable->is_captured()) {
464 Fragment instructions;
465 instructions += LoadContextAt(variable->owner()->context_level());
474IndirectGotoInstr* FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
476 return new (Z) IndirectGotoInstr(target_count, index);
479Fragment FlowGraphBuilder::ThrowLateInitializationError(
480 TokenPosition position,
481 const char* throw_method_name,
482 const String& name) {
488 const auto& error = klass.EnsureIsFinalized(thread_);
490 const Function& throw_new =
492 H.DartSymbolObfuscate(throw_method_name)));
493 ASSERT(!throw_new.IsNull());
495 Fragment instructions;
502 instructions += Drop();
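// Stores into a late field. The current value is compared against the
// sentinel; the already-initialized branch throws through
// ThrowLateInitializationError("_throwFieldAlreadyInitialized", ...), while
// the uninitialized branch performs the actual store.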
507Fragment FlowGraphBuilder::StoreLateField(const Field& field,
509 LocalVariable* setter_value) {
510 Fragment instructions;
511 TargetEntryInstr* is_uninitialized;
512 TargetEntryInstr* is_initialized;
513 const TokenPosition position = field.token_pos();
514 const bool is_static = field.is_static();
515 const bool is_final = field.is_final();
522 instructions += LoadLocal(instance);
525 instructions += Constant(Object::sentinel());
531 Fragment initialize(is_uninitialized);
537 Fragment already_initialized(is_initialized);
538 already_initialized += ThrowLateInitializationError(
539 position, "_throwFieldAlreadyInitialized",
544 instructions = Fragment(instructions.entry, join);
548 instructions += LoadLocal(instance);
550 instructions += LoadLocal(setter_value);
560Fragment FlowGraphBuilder::NativeCall(const String& name,
564 const intptr_t num_args =
567 Fragment instructions;
571 NativeCallInstr* call = new (Z) NativeCallInstr(
573 InstructionSource(function.end_token_pos()), std::move(arguments));
575 instructions <<= call;
579Fragment FlowGraphBuilder::Return(TokenPosition position,
580 bool omit_result_type_check) {
581 Fragment instructions;
586 if (!omit_result_type_check && function.is_old_native()) {
587 const AbstractType& return_type =
589 instructions += CheckAssignable(return_type, Symbols::FunctionResult());
592 if (NeedsDebugStepCheck(function, position)) {
601Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
609void FlowGraphBuilder::SetResultTypeForStaticCall(
610 StaticCallInstr* call,
613 const InferredTypeMetadata* result_type) {
614 if (call->InitResultType(Z)) {
616 (result_type->cid == call->result_cid()));
619 if ((result_type != nullptr) && !result_type->IsTrivial()) {
620 call->SetResultType(Z, result_type->ToCompileType(Z));
624Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
627 const Array& argument_names,
629 const InferredTypeMetadata* result_type,
630 intptr_t type_args_count,
631 bool use_unchecked_entry) {
633 const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
635 StaticCallInstr* call = new (Z) StaticCallInstr(
636 InstructionSource(position), target, type_args_count, argument_names,
637 std::move(arguments), ic_data_array_, GetNextDeoptId(), rebind_rule);
639 if (use_unchecked_entry) {
640 call->set_entry_kind(Code::EntryKind::kUnchecked);
643 instructions <<= call;
644 if (result_type != nullptr && result_type->IsConstant()) {
645 instructions += Drop();
646 instructions += Constant(result_type->constant_value);
651Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
655 const Array& argument_names,
656 intptr_t type_args_count) {
657 const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
659 CachableIdempotentCallInstr* call = new (Z) CachableIdempotentCallInstr(
660 InstructionSource(position), representation, target, type_args_count,
663 return Fragment(call);
666Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
667 Fragment instructions;
668 instructions += StaticCall(
674Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
675 Fragment instructions;
682Fragment FlowGraphBuilder::ThrowTypeError() {
686 GrowableHandlePtrArray<const String> pieces(Z, 3);
687 pieces.Add(Symbols::TypeError());
689 pieces.Add(H.DartSymbolObfuscate("_create"));
692 Z, klass.LookupConstructorAllowPrivate(
694 ASSERT(!constructor.IsNull());
696 const String& url = H.DartString(
700 Fragment instructions;
703 instructions += AllocateObject(TokenPosition::kNoSource, klass, 0);
707 instructions += LoadLocal(instance);
711 instructions += Constant(H.DartSymbolPlain("Malformed type."));
713 instructions += StaticCall(TokenPosition::kNoSource, constructor,
715 instructions += Drop();
723Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
725 bool incompatible_arguments,
726 bool receiver_pushed) {
730 if (target.IsImplicitGetterFunction() || target.IsGetterFunction() ||
731 target.IsRecordFieldGetter()) {
733 } else if (target.IsImplicitSetterFunction() || target.IsSetterFunction()) {
737 if (owner.IsTopLevel()) {
738 if (incompatible_arguments) {
739 receiver = target.UserVisibleSignature();
741 level = InvocationMirror::Level::kTopLevel;
743 receiver = owner.RareType();
744 if (target.kind() == UntaggedFunction::kConstructor) {
745 level = InvocationMirror::Level::kConstructor;
746 } else if (target.IsRecordFieldGetter()) {
747 level = InvocationMirror::Level::kDynamic;
749 level = InvocationMirror::Level::kStatic;
753 Fragment instructions;
754 if (!receiver_pushed) {
763Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
764 const String& selector,
767 bool receiver_pushed) {
771 const auto& error = klass.EnsureIsFinalized(H.thread());
774 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNew()));
775 ASSERT(!throw_function.IsNull());
777 Fragment instructions;
778 if (!receiver_pushed) {
787 instructions += StaticCall(position, throw_function, 7,
792LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
793 LocalVariable* local = scopes_->locals.Lookup(kernel_offset);
808 !function.IsDynamicInvocationForwarder()) {
810 FATAL("Recognized method %s is not marked with the vm:recognized pragma.",
813 FATAL("Non-recognized method %s is marked with the vm:recognized pragma.",
820 intptr_t kernel_data_program_offset = function.KernelLibraryOffset();
823 this, kernel_data, kernel_data_program_offset);
830 FLAG_huge_method_cutoff_in_ast_nodes) {
831 if (FLAG_print_huge_methods) {
833 "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
835 "disabled, and the compiler might run out of memory. "
836 "Consider refactoring this code into smaller components.\n",
837 function.QualifiedUserVisibleNameCString(),
843 result->mark_huge_method();
860 for (intptr_t i = 0; i < function.NumParameters(); ++i) {
866 Return(TokenPosition::kNoSource, false);
872 case kUnboxedInt32x4:
873 case kUnboxedFloat32x4:
874 case kUnboxedFloat64x2:
885 return state.TypedListGetFloat32();
887 return state.TypedListGetFloat64();
888 case kUnboxedInt32x4:
889 return state.TypedListGetInt32x4();
890 case kUnboxedFloat32x4:
891 return state.TypedListGetFloat32x4();
892 case kUnboxedFloat64x2:
893 return state.TypedListGetFloat64x2();
896 return Object::null_function();
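// Each V(method, slot) entry below maps a recognized method to the native
// field Slot it loads or stores; the IL_BODY macros further down expand
// these entries into LoadNativeField/StoreNativeField cases.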
900#define LOAD_NATIVE_FIELD(V) \
901 V(ByteDataViewLength, TypedDataBase_length) \
902 V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
903 V(ByteDataViewTypedData, TypedDataView_typed_data) \
904 V(Finalizer_getCallback, Finalizer_callback) \
905 V(FinalizerBase_getAllEntries, FinalizerBase_all_entries) \
906 V(FinalizerBase_getDetachments, FinalizerBase_detachments) \
907 V(FinalizerEntry_getDetach, FinalizerEntry_detach) \
908 V(FinalizerEntry_getNext, FinalizerEntry_next) \
909 V(FinalizerEntry_getToken, FinalizerEntry_token) \
910 V(FinalizerEntry_getValue, FinalizerEntry_value) \
911 V(NativeFinalizer_getCallback, NativeFinalizer_callback) \
912 V(GrowableArrayLength, GrowableObjectArray_length) \
913 V(ReceivePort_getSendPort, ReceivePort_send_port) \
914 V(ReceivePort_getHandler, ReceivePort_handler) \
915 V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data) \
916 V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index) \
917 V(LinkedHashBase_getData, LinkedHashBase_data) \
918 V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys) \
919 V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask) \
920 V(LinkedHashBase_getIndex, LinkedHashBase_index) \
921 V(LinkedHashBase_getUsedData, LinkedHashBase_used_data) \
922 V(ObjectArrayLength, Array_length) \
923 V(Record_shape, Record_shape) \
924 V(SuspendState_getFunctionData, SuspendState_function_data) \
925 V(SuspendState_getThenCallback, SuspendState_then_callback) \
926 V(SuspendState_getErrorCallback, SuspendState_error_callback) \
927 V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes) \
928 V(TypedDataViewTypedData, TypedDataView_typed_data) \
929 V(TypedListBaseLength, TypedDataBase_length) \
930 V(WeakProperty_getKey, WeakProperty_key) \
931 V(WeakProperty_getValue, WeakProperty_value) \
932 V(WeakReference_getTarget, WeakReference_target)
934#define STORE_NATIVE_FIELD(V) \
935 V(Finalizer_setCallback, Finalizer_callback) \
936 V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
937 V(FinalizerBase_setDetachments, FinalizerBase_detachments) \
938 V(FinalizerEntry_setToken, FinalizerEntry_token) \
939 V(NativeFinalizer_setCallback, NativeFinalizer_callback) \
940 V(ReceivePort_setHandler, ReceivePort_handler) \
941 V(LinkedHashBase_setData, LinkedHashBase_data) \
942 V(LinkedHashBase_setIndex, LinkedHashBase_index) \
943 V(SuspendState_setFunctionData, SuspendState_function_data) \
944 V(SuspendState_setThenCallback, SuspendState_then_callback) \
945 V(SuspendState_setErrorCallback, SuspendState_error_callback) \
946 V(WeakProperty_setKey, WeakProperty_key) \
947 V(WeakProperty_setValue, WeakProperty_value) \
948 V(WeakReference_setTarget, WeakReference_target)
950#define STORE_NATIVE_FIELD_NO_BARRIER(V) \
951 V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys) \
952 V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask) \
953 V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)
960#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
961 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
963 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
965 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
968#undef TYPED_DATA_GET_INDEXED_CASES
969 case MethodRecognizer::kObjectArrayGetIndexed:
970 case MethodRecognizer::kGrowableArrayGetIndexed:
971 case MethodRecognizer::kRecord_fieldAt:
972 case MethodRecognizer::kRecord_fieldNames:
973 case MethodRecognizer::kRecord_numFields:
974 case MethodRecognizer::kSuspendState_clone:
975 case MethodRecognizer::kSuspendState_resume:
976 case MethodRecognizer::kTypedList_GetInt8:
977 case MethodRecognizer::kTypedList_SetInt8:
978 case MethodRecognizer::kTypedList_GetUint8:
979 case MethodRecognizer::kTypedList_SetUint8:
980 case MethodRecognizer::kTypedList_GetInt16:
981 case MethodRecognizer::kTypedList_SetInt16:
982 case MethodRecognizer::kTypedList_GetUint16:
983 case MethodRecognizer::kTypedList_SetUint16:
984 case MethodRecognizer::kTypedList_GetInt32:
985 case MethodRecognizer::kTypedList_SetInt32:
986 case MethodRecognizer::kTypedList_GetUint32:
987 case MethodRecognizer::kTypedList_SetUint32:
988 case MethodRecognizer::kTypedList_GetInt64:
989 case MethodRecognizer::kTypedList_SetInt64:
990 case MethodRecognizer::kTypedList_GetUint64:
991 case MethodRecognizer::kTypedList_SetUint64:
992 case MethodRecognizer::kTypedList_GetFloat32:
993 case MethodRecognizer::kTypedList_SetFloat32:
994 case MethodRecognizer::kTypedList_GetFloat64:
995 case MethodRecognizer::kTypedList_SetFloat64:
996 case MethodRecognizer::kTypedList_GetInt32x4:
997 case MethodRecognizer::kTypedList_SetInt32x4:
998 case MethodRecognizer::kTypedList_GetFloat32x4:
999 case MethodRecognizer::kTypedList_SetFloat32x4:
1000 case MethodRecognizer::kTypedList_GetFloat64x2:
1001 case MethodRecognizer::kTypedList_SetFloat64x2:
1002 case MethodRecognizer::kTypedData_memMove1:
1003 case MethodRecognizer::kTypedData_memMove2:
1004 case MethodRecognizer::kTypedData_memMove4:
1005 case MethodRecognizer::kTypedData_memMove8:
1006 case MethodRecognizer::kTypedData_memMove16:
1007 case MethodRecognizer::kTypedData_ByteDataView_factory:
1008 case MethodRecognizer::kTypedData_Int8ArrayView_factory:
1009 case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
1010 case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
1011 case MethodRecognizer::kTypedData_Int16ArrayView_factory:
1012 case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
1013 case MethodRecognizer::kTypedData_Int32ArrayView_factory:
1014 case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
1015 case MethodRecognizer::kTypedData_Int64ArrayView_factory:
1016 case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
1017 case MethodRecognizer::kTypedData_Float32ArrayView_factory:
1018 case MethodRecognizer::kTypedData_Float64ArrayView_factory:
1019 case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
1020 case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
1021 case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
1022 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1023 case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
1024 case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
1025 case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
1026 case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
1027 case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
1028 case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
1029 case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
1030 case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
1031 case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
1032 case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
1033 case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
1034 case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
1035 case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
1036 case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
1037 case MethodRecognizer::kTypedData_Int8Array_factory:
1038 case MethodRecognizer::kTypedData_Uint8Array_factory:
1039 case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
1040 case MethodRecognizer::kTypedData_Int16Array_factory:
1041 case MethodRecognizer::kTypedData_Uint16Array_factory:
1042 case MethodRecognizer::kTypedData_Int32Array_factory:
1043 case MethodRecognizer::kTypedData_Uint32Array_factory:
1044 case MethodRecognizer::kTypedData_Int64Array_factory:
1045 case MethodRecognizer::kTypedData_Uint64Array_factory:
1046 case MethodRecognizer::kTypedData_Float32Array_factory:
1047 case MethodRecognizer::kTypedData_Float64Array_factory:
1048 case MethodRecognizer::kTypedData_Float32x4Array_factory:
1049 case MethodRecognizer::kTypedData_Int32x4Array_factory:
1050 case MethodRecognizer::kTypedData_Float64x2Array_factory:
1051 case MethodRecognizer::kMemCopy:
1052 case MethodRecognizer::kFfiLoadInt8:
1053 case MethodRecognizer::kFfiLoadInt16:
1054 case MethodRecognizer::kFfiLoadInt32:
1055 case MethodRecognizer::kFfiLoadInt64:
1056 case MethodRecognizer::kFfiLoadUint8:
1057 case MethodRecognizer::kFfiLoadUint16:
1058 case MethodRecognizer::kFfiLoadUint32:
1059 case MethodRecognizer::kFfiLoadUint64:
1060 case MethodRecognizer::kFfiLoadFloat:
1061 case MethodRecognizer::kFfiLoadFloatUnaligned:
1062 case MethodRecognizer::kFfiLoadDouble:
1063 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1064 case MethodRecognizer::kFfiLoadPointer:
1065 case MethodRecognizer::kFfiNativeCallbackFunction:
1066 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1067 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction:
1068 case MethodRecognizer::kFfiStoreInt8:
1069 case MethodRecognizer::kFfiStoreInt16:
1070 case MethodRecognizer::kFfiStoreInt32:
1071 case MethodRecognizer::kFfiStoreInt64:
1072 case MethodRecognizer::kFfiStoreUint8:
1073 case MethodRecognizer::kFfiStoreUint16:
1074 case MethodRecognizer::kFfiStoreUint32:
1075 case MethodRecognizer::kFfiStoreUint64:
1076 case MethodRecognizer::kFfiStoreFloat:
1077 case MethodRecognizer::kFfiStoreFloatUnaligned:
1078 case MethodRecognizer::kFfiStoreDouble:
1079 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1080 case MethodRecognizer::kFfiStorePointer:
1081 case MethodRecognizer::kFfiFromAddress:
1082 case MethodRecognizer::kFfiGetAddress:
1083 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1084 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1085 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1086 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1087 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1088 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1089 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1090 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1091 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1092 case MethodRecognizer::kFfiAsExternalTypedDataDouble:
1093 case MethodRecognizer::kGetNativeField:
1094 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1095 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1096 case MethodRecognizer::kFinalizerBase_setIsolate:
1097 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1098 case MethodRecognizer::kFinalizerEntry_allocate:
1099 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1100 case MethodRecognizer::kCheckNotDeeplyImmutable:
1101 case MethodRecognizer::kObjectEquals:
1102 case MethodRecognizer::kStringBaseCodeUnitAt:
1103 case MethodRecognizer::kStringBaseLength:
1104 case MethodRecognizer::kStringBaseIsEmpty:
1105 case MethodRecognizer::kClassIDgetID:
1106 case MethodRecognizer::kGrowableArrayAllocateWithData:
1107 case MethodRecognizer::kGrowableArrayCapacity:
1108 case MethodRecognizer::kObjectArrayAllocate:
1109 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1110 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1111 case MethodRecognizer::kFfiAbi:
1112 case MethodRecognizer::kUtf8DecoderScan:
1113 case MethodRecognizer::kHas63BitSmis:
1114 case MethodRecognizer::kExtensionStreamHasListener:
1115 case MethodRecognizer::kSmi_hashCode:
1116 case MethodRecognizer::kMint_hashCode:
1117 case MethodRecognizer::kDouble_hashCode:
1118#define CASE(method, slot) case MethodRecognizer::k##method:
1124 case MethodRecognizer::kDoubleToInteger:
1125 case MethodRecognizer::kDoubleMod:
1126 case MethodRecognizer::kDoubleRem:
1127 case MethodRecognizer::kDoubleRoundToDouble:
1128 case MethodRecognizer::kDoubleTruncateToDouble:
1129 case MethodRecognizer::kDoubleFloorToDouble:
1130 case MethodRecognizer::kDoubleCeilToDouble:
1131 case MethodRecognizer::kMathDoublePow:
1132 case MethodRecognizer::kMathSin:
1133 case MethodRecognizer::kMathCos:
1134 case MethodRecognizer::kMathTan:
1135 case MethodRecognizer::kMathAsin:
1136 case MethodRecognizer::kMathAcos:
1137 case MethodRecognizer::kMathAtan:
1138 case MethodRecognizer::kMathAtan2:
1139 case MethodRecognizer::kMathExp:
1140 case MethodRecognizer::kMathLog:
1141 case MethodRecognizer::kMathSqrt:
1151 switch (function.recognized_kind()) {
1152 case MethodRecognizer::kStringBaseCodeUnitAt:
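// Builds the whole flow graph for a recognized method: a prologue with a
// stack-overflow check, argument and type-argument checks when the function
// is a dynamic invocation forwarder, and a per-kind body selected by the
// switch below.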
1159FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
1171 BuildPrologue(normal_entry, &prologue_info);
1174 body += CheckStackOverflowInPrologue(function.token_pos());
1176 if (function.IsDynamicInvocationForwarder()) {
1177 body += BuildDefaultTypeHandling(function);
1178 BuildTypeArgumentTypeChecks(
1180 BuildArgumentTypeChecks(&body, &body, nullptr);
1185#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
1186 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
1188 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
1190 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
1193#undef TYPED_DATA_GET_INDEXED_CASES
1194 case MethodRecognizer::kObjectArrayGetIndexed:
1195 case MethodRecognizer::kGrowableArrayGetIndexed: {
1207 const auto& native_function =
1209 body += LoadLocal(safe_index);
1215 body += StaticCall(TokenPosition::kNoSource, native_function, 2,
1218 if (kind == MethodRecognizer::kGrowableArrayGetIndexed) {
1220 array_cid = kArrayCid;
1225 body += LoadLocal(safe_index);
1232 if (elem_rep == kUnboxedFloat) {
1239 case MethodRecognizer::kRecord_fieldAt:
1246 case MethodRecognizer::kRecord_fieldNames:
1247 body += LoadObjectStore();
1258 case MethodRecognizer::kRecord_numFields:
1264 case MethodRecognizer::kSuspendState_clone: {
1267 body += Call1ArgStub(TokenPosition::kNoSource,
1271 case MethodRecognizer::kSuspendState_resume: {
1272 const Code& resume_stub =
1278 case MethodRecognizer::kTypedList_GetInt8:
1279 body += BuildTypedListGet(function, kTypedDataInt8ArrayCid);
1281 case MethodRecognizer::kTypedList_SetInt8:
1282 body += BuildTypedListSet(function, kTypedDataInt8ArrayCid);
1284 case MethodRecognizer::kTypedList_GetUint8:
1285 body += BuildTypedListGet(function, kTypedDataUint8ArrayCid);
1287 case MethodRecognizer::kTypedList_SetUint8:
1288 body += BuildTypedListSet(function, kTypedDataUint8ArrayCid);
1290 case MethodRecognizer::kTypedList_GetInt16:
1291 body += BuildTypedListGet(function, kTypedDataInt16ArrayCid);
1293 case MethodRecognizer::kTypedList_SetInt16:
1294 body += BuildTypedListSet(function, kTypedDataInt16ArrayCid);
1296 case MethodRecognizer::kTypedList_GetUint16:
1297 body += BuildTypedListGet(function, kTypedDataUint16ArrayCid);
1299 case MethodRecognizer::kTypedList_SetUint16:
1300 body += BuildTypedListSet(function, kTypedDataUint16ArrayCid);
1302 case MethodRecognizer::kTypedList_GetInt32:
1303 body += BuildTypedListGet(function, kTypedDataInt32ArrayCid);
1305 case MethodRecognizer::kTypedList_SetInt32:
1306 body += BuildTypedListSet(function, kTypedDataInt32ArrayCid);
1308 case MethodRecognizer::kTypedList_GetUint32:
1309 body += BuildTypedListGet(function, kTypedDataUint32ArrayCid);
1311 case MethodRecognizer::kTypedList_SetUint32:
1312 body += BuildTypedListSet(function, kTypedDataUint32ArrayCid);
1314 case MethodRecognizer::kTypedList_GetInt64:
1315 body += BuildTypedListGet(function, kTypedDataInt64ArrayCid);
1317 case MethodRecognizer::kTypedList_SetInt64:
1318 body += BuildTypedListSet(function, kTypedDataInt64ArrayCid);
1320 case MethodRecognizer::kTypedList_GetUint64:
1321 body += BuildTypedListGet(function, kTypedDataUint64ArrayCid);
1323 case MethodRecognizer::kTypedList_SetUint64:
1324 body += BuildTypedListSet(function, kTypedDataUint64ArrayCid);
1326 case MethodRecognizer::kTypedList_GetFloat32:
1327 body += BuildTypedListGet(function, kTypedDataFloat32ArrayCid);
1329 case MethodRecognizer::kTypedList_SetFloat32:
1330 body += BuildTypedListSet(function, kTypedDataFloat32ArrayCid);
1332 case MethodRecognizer::kTypedList_GetFloat64:
1333 body += BuildTypedListGet(function, kTypedDataFloat64ArrayCid);
1335 case MethodRecognizer::kTypedList_SetFloat64:
1336 body += BuildTypedListSet(function, kTypedDataFloat64ArrayCid);
1338 case MethodRecognizer::kTypedList_GetInt32x4:
1339 body += BuildTypedListGet(function, kTypedDataInt32x4ArrayCid);
1341 case MethodRecognizer::kTypedList_SetInt32x4:
1342 body += BuildTypedListSet(function, kTypedDataInt32x4ArrayCid);
1344 case MethodRecognizer::kTypedList_GetFloat32x4:
1345 body += BuildTypedListGet(function, kTypedDataFloat32x4ArrayCid);
1347 case MethodRecognizer::kTypedList_SetFloat32x4:
1348 body += BuildTypedListSet(function, kTypedDataFloat32x4ArrayCid);
1350 case MethodRecognizer::kTypedList_GetFloat64x2:
1351 body += BuildTypedListGet(function, kTypedDataFloat64x2ArrayCid);
1353 case MethodRecognizer::kTypedList_SetFloat64x2:
1354 body += BuildTypedListSet(function, kTypedDataFloat64x2ArrayCid);
1356 case MethodRecognizer::kTypedData_memMove1:
1357 body += BuildTypedDataMemMove(function, kTypedDataInt8ArrayCid);
1359 case MethodRecognizer::kTypedData_memMove2:
1360 body += BuildTypedDataMemMove(function, kTypedDataInt16ArrayCid);
1362 case MethodRecognizer::kTypedData_memMove4:
1363 body += BuildTypedDataMemMove(function, kTypedDataInt32ArrayCid);
1365 case MethodRecognizer::kTypedData_memMove8:
1366 body += BuildTypedDataMemMove(function, kTypedDataInt64ArrayCid);
1368 case MethodRecognizer::kTypedData_memMove16:
1369 body += BuildTypedDataMemMove(function, kTypedDataInt32x4ArrayCid);
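// The factory cases below expand, per typed-data class, into calls to
// BuildTypedDataFactoryConstructor or BuildTypedDataViewFactoryConstructor
// for the plain, view, and unmodifiable-view variants.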
1372 case MethodRecognizer::kTypedData_##name##_factory: \
1373 body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
1375 case MethodRecognizer::kTypedData_##name##View_factory: \
1376 body += BuildTypedDataViewFactoryConstructor(function, \
1377 kTypedData##name##ViewCid); \
1379 case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
1380 body += BuildTypedDataViewFactoryConstructor( \
1381 function, kUnmodifiableTypedData##name##ViewCid); \
1385 case MethodRecognizer::kTypedData_ByteDataView_factory:
1388 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1389 body += BuildTypedDataViewFactoryConstructor(
1392 case MethodRecognizer::kObjectEquals:
1398 case MethodRecognizer::kStringBaseCodeUnitAt: {
1408 TargetEntryInstr* one_byte_string;
1409 TargetEntryInstr* two_byte_string;
1415 body.current = one_byte_string;
1417 body += LoadLocal(safe_index);
1427 body.current = two_byte_string;
1429 body += LoadLocal(safe_index);
1441 body += LoadLocal(result);
1443 case MethodRecognizer::kStringBaseLength:
1444 case MethodRecognizer::kStringBaseIsEmpty:
1448 if (kind == MethodRecognizer::kStringBaseIsEmpty) {
1453 case MethodRecognizer::kClassIDgetID:
1458 case MethodRecognizer::kGrowableArrayAllocateWithData: {
1466 body += LoadLocal(object);
1471 body += LoadLocal(object);
1478 case MethodRecognizer::kGrowableArrayCapacity:
1484 case MethodRecognizer::kObjectArrayAllocate:
1490 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1497 body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
1502 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1513 case MethodRecognizer::kUtf8DecoderScan:
1527 case MethodRecognizer::kMemCopy: {
1530 LocalVariable* arg_target_offset_in_bytes =
1533 LocalVariable* arg_source_offset_in_bytes =
1535 LocalVariable* arg_length_in_bytes =
1537 body += LoadLocal(arg_source);
1538 body += LoadLocal(arg_target);
1539 body += LoadLocal(arg_source_offset_in_bytes);
1541 body += LoadLocal(arg_target_offset_in_bytes);
1543 body += LoadLocal(arg_length_in_bytes);
1545 body += MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1550 case MethodRecognizer::kFfiAbi:
1554 case MethodRecognizer::kFfiNativeCallbackFunction:
1555 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1556 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction: {
1559 "This function should be handled on call site."));
1564 case MethodRecognizer::kFfiLoadInt8:
1565 case MethodRecognizer::kFfiLoadInt16:
1566 case MethodRecognizer::kFfiLoadInt32:
1567 case MethodRecognizer::kFfiLoadInt64:
1568 case MethodRecognizer::kFfiLoadUint8:
1569 case MethodRecognizer::kFfiLoadUint16:
1570 case MethodRecognizer::kFfiLoadUint32:
1571 case MethodRecognizer::kFfiLoadUint64:
1572 case MethodRecognizer::kFfiLoadFloat:
1573 case MethodRecognizer::kFfiLoadFloatUnaligned:
1574 case MethodRecognizer::kFfiLoadDouble:
1575 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1576 case MethodRecognizer::kFfiLoadPointer: {
1586 LocalVariable* arg_typed_data_base =
1590 body += LoadLocal(arg_typed_data_base);
1592 body += LoadLocal(arg_offset);
1597 if (kind == MethodRecognizer::kFfiLoadPointer) {
1598 const auto& pointer_class =
1601 Z, IG->object_store()->type_argument_never());
1607 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1609 body += LoadLocal(pointer);
1610 body += LoadLocal(address);
1624 case MethodRecognizer::kFfiStoreInt8:
1625 case MethodRecognizer::kFfiStoreInt16:
1626 case MethodRecognizer::kFfiStoreInt32:
1627 case MethodRecognizer::kFfiStoreInt64:
1628 case MethodRecognizer::kFfiStoreUint8:
1629 case MethodRecognizer::kFfiStoreUint16:
1630 case MethodRecognizer::kFfiStoreUint32:
1631 case MethodRecognizer::kFfiStoreUint64:
1632 case MethodRecognizer::kFfiStoreFloat:
1633 case MethodRecognizer::kFfiStoreFloatUnaligned:
1634 case MethodRecognizer::kFfiStoreDouble:
1635 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1636 case MethodRecognizer::kFfiStorePointer: {
1645 LocalVariable* arg_typed_data_base =
1652 body += LoadLocal(arg_typed_data_base);
1654 body += LoadLocal(arg_offset);
1657 body += LoadLocal(arg_value);
1659 if (kind == MethodRecognizer::kFfiStorePointer) {
1669 body += UnboxTruncate(
1676 case MethodRecognizer::kFfiFromAddress: {
1677 const auto& pointer_class =
1680 Z, IG->object_store()->type_argument_never());
1685 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1698 case MethodRecognizer::kFfiGetAddress: {
1708 case MethodRecognizer::kHas63BitSmis: {
1709#if defined(HAS_SMI_63_BITS)
1715 case MethodRecognizer::kExtensionStreamHasListener: {
1719 body += LoadServiceExtensionStream();
1723 body += IntToBool();
1726 case MethodRecognizer::kSmi_hashCode: {
1734 body += BuildIntegerHashCode(true);
1736 case MethodRecognizer::kMint_hashCode: {
1739 body += BuildIntegerHashCode(false);
1741 case MethodRecognizer::kDouble_hashCode: {
1744 body += UnboxTruncate(kUnboxedDouble);
1745 body += BuildDoubleHashCode();
1746 body += Box(kUnboxedInt64);
1748 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1749 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1750 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1751 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1752 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1753 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1754 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1755 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1756 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1757 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1765 const auto& typed_data_class =
1774 body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1778 body += LoadLocal(typed_data_object);
1779 body += LoadLocal(arg_length);
1785 body += LoadLocal(typed_data_object);
1786 body += LoadLocal(arg_pointer);
1793 case MethodRecognizer::kGetNativeField: {
1807 case MethodRecognizer::kDoubleToInteger: {
1811 case MethodRecognizer::kDoubleMod:
1812 case MethodRecognizer::kDoubleRem:
1813 case MethodRecognizer::kDoubleRoundToDouble:
1814 case MethodRecognizer::kDoubleTruncateToDouble:
1815 case MethodRecognizer::kDoubleFloorToDouble:
1816 case MethodRecognizer::kDoubleCeilToDouble:
1817 case MethodRecognizer::kMathDoublePow:
1818 case MethodRecognizer::kMathSin:
1819 case MethodRecognizer::kMathCos:
1820 case MethodRecognizer::kMathTan:
1821 case MethodRecognizer::kMathAsin:
1822 case MethodRecognizer::kMathAcos:
1823 case MethodRecognizer::kMathAtan:
1824 case MethodRecognizer::kMathAtan2:
1825 case MethodRecognizer::kMathExp:
1826 case MethodRecognizer::kMathLog: {
1827 for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
1832 case MethodRecognizer::kMathSqrt: {
1836 case MethodRecognizer::kFinalizerBase_setIsolate:
1839 body += LoadIsolate();
1844 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1846 body += LoadIsolate();
1849 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1851 body += LoadIsolate();
1856 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1858 ASSERT(this->optimizing_);
1871 case MethodRecognizer::kFinalizerEntry_allocate: {
1876 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1877 const auto& finalizer_entry_class =
1881 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1884 body += LoadLocal(entry);
1887 body += LoadLocal(entry);
1890 body += LoadLocal(entry);
1893 body += LoadLocal(entry);
1896 body += LoadLocal(entry);
1901 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1905 body += Box(kUnboxedInt64);
1907 case MethodRecognizer::kCheckNotDeeplyImmutable:
1914#define IL_BODY(method, slot) \
1915 case MethodRecognizer::k##method: \
1916 ASSERT_EQUAL(function.NumParameters(), 1); \
1917 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1918 body += LoadNativeField(Slot::slot()); \
1922#define IL_BODY(method, slot) \
1923 case MethodRecognizer::k##method: \
1924 ASSERT_EQUAL(function.NumParameters(), 2); \
1925 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1926 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1927 body += StoreNativeField(Slot::slot()); \
1928 body += NullConstant(); \
1932#define IL_BODY(method, slot) \
1933 case MethodRecognizer::k##method: \
1934 ASSERT_EQUAL(function.NumParameters(), 2); \
1935 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1936 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1937 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1939 body += NullConstant(); \
1951 Return(TokenPosition::kNoSource, true);
1959Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
1962 auto token_pos = function.token_pos();
1965 ASSERT(class_table->HasValidClassAt(cid));
1986 body += LoadLocal(view_object);
1987 body += LoadLocal(typed_data);
1991 body += LoadLocal(view_object);
1992 body += LoadLocal(offset_in_bytes);
1997 body += LoadLocal(view_object);
1998 body += LoadLocal(length);
2007 body += LoadLocal(offset_in_bytes);
2009 LocalVariable* unboxed_offset_in_bytes =
2016 body += LoadLocal(view_object);
2017 body += LoadLocal(typed_data);
2021 body += LoadLocal(unboxed_offset_in_bytes);
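// Loads an element from a typed list given the receiver and an offset in
// bytes; one path falls back to a StaticCall of the native_function chosen
// for the element representation.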
2031Fragment FlowGraphBuilder::BuildTypedListGet(const Function& function,
2033 const intptr_t kNumParameters = 2;
2039 LocalVariable* arg_offset_in_bytes =
2044 body += LoadLocal(arg_receiver);
2045 body += LoadLocal(arg_offset_in_bytes);
2051 body += LoadLocal(arg_receiver);
2052 body += LoadLocal(arg_offset_in_bytes);
2053 body += StaticCall(TokenPosition::kNoSource, native_function,
2054 kNumParameters, ICData::kNoRebind);
2064 return state.TypedListSetFloat32();
2065 case kUnboxedDouble:
2066 return state.TypedListSetFloat64();
2067 case kUnboxedInt32x4:
2068 return state.TypedListSetInt32x4();
2069 case kUnboxedFloat32x4:
2070 return state.TypedListSetFloat32x4();
2071 case kUnboxedFloat64x2:
2072 return state.TypedListSetFloat64x2();
2075 return Object::null_function();
2081 const intptr_t kNumParameters = 3;
2093 body += LoadLocal(arg_receiver);
2094 body += LoadLocal(arg_offset_in_bytes);
2095 body += LoadLocal(arg_value);
2104 body += LoadLocal(arg_receiver);
2105 body += LoadLocal(arg_offset_in_bytes);
2106 body += LoadLocal(arg_value);
2107 body += StaticCall(TokenPosition::kNoSource, native_function,
2108 kNumParameters, ICData::kNoRebind);
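// Copies between typed-data buffers: copies below kCopyLengthForCCall (a
// threshold that differs per architecture) use the MemoryCopy instruction,
// while larger copies unbox the arguments and call the memmove leaf runtime
// entry (kMemoryMoveRuntimeEntry) via CallLeafRuntimeEntry.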
2113Fragment FlowGraphBuilder::BuildTypedDataMemMove(const Function& function,
2125#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2129 const intptr_t kCopyLengthForCCall = 1024 * 1024;
2137 const intptr_t kCopyLengthForCCall = 1024;
2141 TargetEntryInstr *is_small_enough, *is_too_large;
2142 body += LoadLocal(arg_count);
2147 Fragment use_instruction(is_small_enough);
2148 use_instruction += LoadLocal(arg_from);
2149 use_instruction += LoadLocal(arg_to);
2150 use_instruction += LoadLocal(arg_from_start);
2151 use_instruction += LoadLocal(arg_to_start);
2152 use_instruction += LoadLocal(arg_count);
2157 Fragment call_memmove(is_too_large);
2159 auto* const arg_reps =
2160 new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
2164 call_memmove += LoadLocal(arg_to_start);
2166 LocalVariable* to_start_unboxed = MakeTemporary("to_start_unboxed");
2167 call_memmove += LoadLocal(arg_from_start);
2169 LocalVariable* from_start_unboxed = MakeTemporary("from_start_unboxed");
2172 call_memmove += LoadLocal(arg_count);
2173 call_memmove += UnboxTruncate(size_rep);
2177 LocalVariable* length_in_bytes = MakeTemporary("length_in_bytes");
2179 call_memmove += LoadLocal(arg_to);
2182 call_memmove += LoadLocal(to_start_unboxed);
2185 arg_reps->Add(kUntagged);
2187 call_memmove += LoadLocal(arg_from);
2190 call_memmove += LoadLocal(from_start_unboxed);
2193 arg_reps->Add(kUntagged);
2195 call_memmove += LoadLocal(length_in_bytes);
2196 arg_reps->Add(size_rep);
2199 CallLeafRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
2201 call_memmove += Drop();
2207 body.current = done;
2213Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
2216 const auto token_pos = function.token_pos();
2223 Fragment instructions;
2224 instructions += LoadLocal(length);
2228 return instructions;
2231Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
2232 TokenPosition position,
2233 const Function& target) {
2240 fragment += LoadLocal(parsed_function_->receiver_var());
2242 const bool has_instantiator_type_args =
2244 if (has_instantiator_type_args) {
2245 fragment += LoadInstantiatorTypeArguments();
2248 target.IsGeneric(), true);
2253Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
2254 const AbstractType& dst_type,
2255 const String& name_symbol) {
2259bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
2260 TokenPosition position) {
2261 return position.IsDebugPause() && !function.is_native() &&
2265bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
2266 TokenPosition position) {
2267 if (!position.IsDebugPause()) {
2270 Definition* definition = value->definition();
2271 if (definition->IsConstant() || definition->IsLoadStaticField() ||
2272 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
2273 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
2276 if (auto const alloc = definition->AsAllocateClosure()) {
2277 return !alloc->known_function().IsNull();
2282Fragment FlowGraphBuilder::EvaluateAssertion() {
2283 const Class& klass =
2286 const auto& error = klass.EnsureIsFinalized(H.thread());
2289 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::EvaluateAssertion()));
2291 return StaticCall(TokenPosition::kNoSource, target, 1,
2295Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
2296 Fragment instructions;
2298 instructions += LoadLocal(top_of_stack);
2300 instructions += Drop();
2301 return instructions;
2304Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
2305 const String& dst_name,
2307 TokenPosition token_pos) {
2308 Fragment instructions;
2309 if (!dst_type.IsTopTypeForSubtyping()) {
2311 instructions += LoadLocal(top_of_stack);
2313 AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
2314 instructions += Drop();
2316 return instructions;
2319Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
2320 TokenPosition position,
2321 const AbstractType& dst_type,
2322 const String& dst_name,
2324 Fragment instructions;
2329 instructions += LoadInstantiatorTypeArguments();
2335 instructions += LoadFunctionTypeArguments();
2342 return instructions;
2345Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
2346 const AbstractType& sub_type_value,
2347 const AbstractType& super_type_value,
2348 const String& dst_name_value) {
2349 Fragment instructions;
2350 instructions += LoadInstantiatorTypeArguments();
2351 instructions += LoadFunctionTypeArguments();
2355 instructions += AssertSubtype(position);
2356 return instructions;
2359Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
2360 Fragment instructions;
2366 Value* instantiator_type_args = Pop();
2368 AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
2369 InstructionSource(position), instantiator_type_args, function_type_args,
2371 instructions += Fragment(instr);
2373 return instructions;
2377 Fragment* implicit_checks) {
2378 const Function& dart_function = parsed_function_->function();
2380 const Function* forwarding_target = nullptr;
2383 ASSERT(!forwarding_target->IsNull());
2387 if (dart_function.IsFactory()) {
2388 type_parameters = Class::Handle(Z, dart_function.Owner()).type_parameters();
2390 type_parameters = dart_function.type_parameters();
2392 const intptr_t num_type_params = type_parameters.Length();
2393 if (num_type_params == 0) return;
2394 if (forwarding_target != nullptr) {
2395 type_parameters = forwarding_target->type_parameters();
2396 ASSERT(type_parameters.Length() == num_type_params);
2398 if (type_parameters.AllDynamicBounds()) {
2405 for (intptr_t i = 0; i < num_type_params; ++i) {
2406 bound = type_parameters.BoundAt(i);
2407 if (bound.IsTopTypeForSubtyping()) {
2415 if (!type_parameters.IsGenericCovariantImplAt(i)) {
2420 if (type_parameters.IsGenericCovariantImplAt(i)) {
2426 name = type_parameters.NameAt(i);
2428 if (forwarding_target != nullptr) {
2429 type_param = forwarding_target->TypeParameterAt(i);
2430 } else if (dart_function.IsFactory()) {
2431 type_param = Class::Handle(Z, dart_function.Owner()).TypeParameterAt(i);
2433 type_param = dart_function.TypeParameterAt(i);
2435 ASSERT(type_param.IsFinalized());
2437 AssertSubtype(TokenPosition::kNoSource, type_param, bound, name);
2443 if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
2444 FLAG_eliminate_type_checks) {
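// Emits AssertAssignable checks for parameters that still need a type check,
// splitting them into explicit (covariant) and implicit check fragments; in
// optimizing mode each non-covariant parameter is also re-stored through
// implicit_redefinitions.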
2453void FlowGraphBuilder::BuildArgumentTypeChecks(
2454 Fragment* explicit_checks,
2455 Fragment* implicit_checks,
2456 Fragment* implicit_redefinitions) {
2457 const Function& dart_function = parsed_function_->function();
2459 const Function* forwarding_target = nullptr;
2462 ASSERT(!forwarding_target->IsNull());
2465 const intptr_t num_params = dart_function.NumParameters();
2466 for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
2469 const String& name = param->name();
2470 if (!param->needs_type_check()) {
2473 if (param->is_captured()) {
2477 const AbstractType* target_type = &param->static_type();
2478 if (forwarding_target != nullptr) {
2484 if (target_type->IsTopTypeForSubtyping()) continue;
2486 const bool is_covariant = param->is_explicit_covariant_parameter();
2487 Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
2489 *checks += LoadLocal(param);
2490 *checks += AssertAssignableLoadTypeArguments(
2491 param->token_pos(), *target_type, name,
2492 AssertAssignableInstr::kParameterCheck);
2496 if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
2499 AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
2501 *implicit_redefinitions += LoadLocal(param);
2503 *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
2504 *implicit_redefinitions += Drop();
2509BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2510 PrologueInfo* prologue_info) {
2513 kernel::PrologueBuilder prologue_builder(
2515 BlockEntryInstr* instruction_cursor =
2516 prologue_builder.BuildPrologue(normal_entry, prologue_info);
2520 return instruction_cursor;
2523ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
2524 if (!function.HasOptionalNamedParameters()) {
2528 const intptr_t num_fixed_params = function.num_fixed_parameters();
2529 const intptr_t num_opt_params = function.NumOptionalNamedParameters();
2532 for (intptr_t i = 0; i < num_opt_params; ++i) {
2539Fragment FlowGraphBuilder::PushExplicitParameters(
2541 const Function& target) {
2542 Fragment instructions;
2543 for (intptr_t i = function.NumImplicitParameters(),
2547 if (!target.IsNull() && target.is_unboxed_parameter_at(i)) {
2549 if (target.is_unboxed_integer_parameter_at(i)) {
2553 to = kUnboxedDouble;
2558 push_param += Fragment(unbox);
2560 instructions += push_param;
2562 return instructions;
2565FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
2566 const Function& method) {
2577 Fragment body(normal_entry);
2578 body += CheckStackOverflowInPrologue(method.token_pos());
2579 body += BuildImplicitClosureCreation(TokenPosition::kNoSource, function);
2580 body += Return(TokenPosition::kNoSource);
2583 PrologueInfo prologue_info(-1, -1);
2589FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
2601 PrologueInfo prologue_info(-1, -1);
2602 BlockEntryInstr* instruction_cursor =
2603 BuildPrologue(normal_entry, &prologue_info);
2605 Fragment body(instruction_cursor);
2606 body += CheckStackOverflowInPrologue(function.token_pos());
2621 const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
2623 body += IntConstant(receiver_index + descriptor.Size());
2626 if (receiver_index > 0) {
2628 ASSERT(type_args != nullptr);
2629 body += LoadLocal(array);
2631 body += LoadLocal(type_args);
2634 for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
2635 body += LoadLocal(array);
2641 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
2642 const intptr_t parameter_index = descriptor.PositionAt(i);
2643 name = descriptor.NameAt(i);
2645 body += LoadLocal(array);
2646 body += IntConstant(receiver_index + parameter_index);
2654 const Class& mirror_class =
2656 ASSERT(!mirror_class.IsNull());
2657 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
2660 Z, mirror_class.LookupStaticFunction(
2662 ASSERT(!allocation_function.IsNull());
2664 4, ICData::kStatic);
2666 const int kTypeArgsLen = 0;
2667 ArgumentsDescriptor two_arguments(
2672 two_arguments, true));
2673 if (no_such_method.IsNull()) {
2678 Symbols::NoSuchMethod(), two_arguments, true);
2681 2, ICData::kNSMDispatch);
2682 body += Return(TokenPosition::kNoSource);
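// Builds the getter graph for a record field access: positional indices are
// checked against the number of positional fields, named fields are found by
// a linear search over the record's field names, and anything else falls
// through to a NoSuchMethodError throw.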
2689FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2700 Fragment body(normal_entry);
2701 body += CheckStackOverflowInPrologue(function.token_pos());
2709 const auto& error = cls.EnsureIsFinalized(thread_);
2712 Z, cls.LookupFunctionAllowPrivate(Symbols::Get_fieldNames()));
2713 ASSERT(!get_field_names_function.IsNull());
2715 body += StaticCall(TokenPosition::kNoSource, get_field_names_function, 1,
2719 body += LoadLocal(field_names);
2728 body += LoadLocal(num_named);
2730 LocalVariable* num_positional = MakeTemporary("num_positional");
2732 const intptr_t field_index =
2734 if (field_index >= 0) {
2737 body += LoadLocal(num_positional);
2739 TargetEntryInstr* valid_index;
2740 TargetEntryInstr* invalid_index;
2743 body.current = valid_index;
2753 body.current = invalid_index;
2758 body += LoadLocal(num_named);
2760 TargetEntryInstr* has_named_fields;
2761 TargetEntryInstr* no_named_fields;
2762 body += BranchIfTrue(&has_named_fields, &no_named_fields);
2764 Fragment(no_named_fields) + Goto(nsm);
2765 body.current = has_named_fields;
2769 body += StoreLocal(TokenPosition::kNoSource, index);
2774 body.current = loop;
2776 body += LoadLocal(field_names);
2777 body += LoadLocal(index);
2781 TargetEntryInstr* found;
2782 TargetEntryInstr* continue_search;
2785 body.current = continue_search;
2786 body += LoadLocal(index);
2789 body += StoreLocal(TokenPosition::kNoSource, index);
2792 body += LoadLocal(index);
2793 body += LoadLocal(num_named);
2795 TargetEntryInstr* has_more_fields;
2796 TargetEntryInstr* no_more_fields;
2797 body += BranchIfTrue(&has_more_fields, &no_more_fields);
2799 Fragment(has_more_fields) + Goto(loop);
2800 Fragment(no_more_fields) + Goto(nsm);
2802 body.current = found;
2806 body += LoadLocal(num_positional);
2807 body += LoadLocal(index);
2818 body.current = done;
2822 body += Return(TokenPosition::kNoSource);
2824 Fragment throw_nsm(nsm);
2825 throw_nsm += LoadLocal(parsed_function_->receiver_var());
2826 throw_nsm += ThrowNoSuchMethodError(TokenPosition::kNoSource, function,
2832 PrologueInfo prologue_info(-1, -1);
2842 const Array& arguments_descriptor_array,
2872Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2879 check += LoadLocal(info.type_parameters);
2884 generic.Prepend(is_generic);
2885 generic += Goto(after_branch);
2887 not_generic.Prepend(is_not_generic);
2888 not_generic += Goto(after_branch);
2893Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2894 const ClosureCallInfo& info,
2899 check_required += LoadLocal(info.vars->current_param_index);
2903 check_required += LoadLocal(info.num_opt_params);
2913 check_required += LoadLocal(flags_index);
2914 check_required += LoadLocal(info.named_parameter_names);
2919 check_required += BranchIfTrue(&valid_index, &invalid_index);
2928 check_required.current = valid_index;
2929 check_required += LoadLocal(info.named_parameter_names);
2930 check_required += LoadLocal(flags_index);
2933 check_required += LoadLocal(info.vars->current_param_index);
2940 "IL builder assumes only one flag bit per parameter");
2952 set.Prepend(is_set);
2955 not_set.Prepend(join_not_set);
2956 not_set += Goto(after_check);
2959 check_required.current = after_check;
2961 return check_required;
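// When a generic closure is called without explicit type arguments, the
// default type arguments stored in the closure's function data are used;
// depending on the recorded default-TAV kind they are taken as-is,
// instantiated, or shared with the instantiator or parent function type
// arguments.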
2964Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2965 const ClosureCallInfo& info) {
2966 if (info.descriptor.TypeArgsLen() > 0) {
2970 Fragment store_provided;
2973 store_provided += Drop();
2974 return store_provided;
2979 Fragment store_default;
2980 store_default += LoadLocal(info.closure);
2983 LocalVariable* closure_data = MakeTemporary("closure_data");
2985 store_default += LoadLocal(closure_data);
2986 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
2988 LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
2993 store_default += LoadLocal(default_tav_kind);
2994 TargetEntryInstr* is_instantiated;
2995 TargetEntryInstr* is_not_instantiated;
2998 store_default +=
BranchIfEqual(&is_instantiated, &is_not_instantiated);
2999 store_default.current = is_not_instantiated;
3000 store_default += LoadLocal(default_tav_kind);
3001 TargetEntryInstr* needs_instantiation;
3002 TargetEntryInstr* can_share;
3005 store_default +=
BranchIfEqual(&needs_instantiation, &can_share);
3006 store_default.current = can_share;
3007 store_default += LoadLocal(default_tav_kind);
3008 TargetEntryInstr* can_share_instantiator;
3009 TargetEntryInstr* can_share_function;
3010 store_default +=
IntConstant(
static_cast<intptr_t
>(
3012 store_default +=
BranchIfEqual(&can_share_instantiator, &can_share_function);
3014 Fragment instantiated(is_instantiated);
3015 instantiated += LoadLocal(
info.type_parameters);
3018 instantiated +=
Drop();
3021 Fragment do_instantiation(needs_instantiation);
3023 do_instantiation += LoadLocal(
info.instantiator_type_args);
3026 do_instantiation += LoadLocal(
info.parent_function_type_args);
3028 do_instantiation += LoadLocal(
info.type_parameters);
3032 do_instantiation +=
Drop();
3035 Fragment share_instantiator(can_share_instantiator);
3036 share_instantiator += LoadLocal(
info.instantiator_type_args);
3038 share_instantiator +=
Drop();
3041 Fragment share_function(can_share_function);
3044 share_function += LoadLocal(
info.parent_function_type_args);
3046 share_function +=
Drop();
3049 store_default.current =
done;
3053 Fragment store_delayed;
3054 store_delayed += LoadLocal(
info.closure);
3055 store_delayed +=
LoadNativeField(Slot::Closure_delayed_type_arguments());
3057 store_delayed +=
Drop();
Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
    const ClosureCallInfo& info) {
  if (info.descriptor.NamedCount() == 0) {
                  "IL builder assumes only one flag bit per parameter");
    has_any += LoadLocal(info.num_opt_params);
    has_any += LoadLocal(info.named_parameter_names);
    TargetEntryInstr* no_required;
    TargetEntryInstr* has_required;
    Fragment(has_required) + Goto(info.throw_no_such_method);
    return Fragment(has_any.entry, no_required);

  Fragment check_names;
  check_names += LoadLocal(info.vars->current_param_index);
  check_names += LoadLocal(info.vars->current_num_processed);
  LocalVariable* old_processed = MakeTemporary("old_processed");
  check_names += Drop();
  check_names += Drop();
  check_names += Goto(loop);

  Fragment loop_check(loop);
  loop_check += LoadLocal(info.vars->current_param_index);
  loop_check += LoadLocal(info.num_opt_params);
  TargetEntryInstr* no_more;
  TargetEntryInstr* more;

  Fragment loop_body(more);
  loop_body += LoadLocal(info.named_parameter_names);
  loop_body += LoadLocal(info.vars->current_param_index);
  for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
    loop_body += LoadLocal(param_name);
    TargetEntryInstr* match;
    TargetEntryInstr* mismatch;
    loop_body.current = mismatch;

    Fragment matched(match);
    matched += LoadLocal(info.vars->current_param_index);
    matched += LoadLocal(info.num_fixed_params);
    matched += StoreLocal(info.vars->named_argument_parameter_indices.At(i));
    matched += LoadLocal(info.vars->current_num_processed);
    matched += Goto(loop_incr);

  loop_body += TestClosureFunctionNamedParameterRequired(
  loop_body += Goto(loop_incr);

  Fragment incr_index(loop_incr);
  incr_index += LoadLocal(info.vars->current_param_index);
  incr_index += Drop();
  incr_index += Goto(loop);

  Fragment check_processed(done);
  check_processed += LoadLocal(info.vars->current_num_processed);
  TargetEntryInstr* all_processed;
  TargetEntryInstr* bad_name;
  check_processed += BranchIfEqual(&all_processed, &bad_name);
  Fragment(bad_name) + Goto(info.throw_no_such_method);

  check_names.current = all_processed;
  check_names += LoadLocal(old_processed);
  check_names += Drop();
  check_names += LoadLocal(old_index);
  check_names += Drop();
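// Builds IL that validates the overall argument shape of a dynamic closure
// call: the provided type-argument count, the positional argument count
// against the fixed and maximal parameter counts, and, when the closure has
// named parameters, the supplied names. Any mismatch branches to
// info.throw_no_such_method.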
Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
    const ClosureCallInfo& info) {
  Fragment check_entry;
  if (info.descriptor.TypeArgsLen() > 0) {
    Fragment check_type_args_length;
    check_type_args_length += LoadLocal(info.type_parameters);
    TargetEntryInstr* null;
    TargetEntryInstr* not_null;
    check_type_args_length += BranchIfNull(&null, &not_null);
    check_type_args_length.current = not_null;
    check_type_args_length += LoadLocal(info.signature);
    check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
        Slot::FunctionType_packed_type_parameter_counts());
    TargetEntryInstr* equal;
    TargetEntryInstr* not_equal;
    check_type_args_length.current = equal;
    Fragment(null) + Goto(info.throw_no_such_method);
    Fragment(not_equal) + Goto(info.throw_no_such_method);
                                      check_type_args_length);

  check_entry += LoadLocal(info.has_named_params);
  TargetEntryInstr* has_named;
  TargetEntryInstr* has_positional;
  check_entry += BranchIfTrue(&has_named, &has_positional);
  check_entry.current = join_after_optional;

  if (info.descriptor.NamedCount() > 0) {
    Fragment(has_positional) + Goto(info.throw_no_such_method);
  Fragment check_pos(has_positional);
  check_pos += LoadLocal(info.num_fixed_params);
  TargetEntryInstr* enough;
  TargetEntryInstr* too_few;
  check_pos.current = enough;
  Fragment(too_few) + Goto(info.throw_no_such_method);
  check_pos += LoadLocal(info.num_max_params);
  TargetEntryInstr* valid;
  TargetEntryInstr* too_many;
  check_pos.current = valid;
  Fragment(too_many) + Goto(info.throw_no_such_method);
  check_pos += Goto(join_after_optional);

  Fragment check_named(has_named);
  TargetEntryInstr* same;
  TargetEntryInstr* different;
  check_named += LoadLocal(info.num_fixed_params);
  check_named.current = same;
  Fragment(different) + Goto(info.throw_no_such_method);
  if (info.descriptor.NamedCount() > 0) {
    check_named += LoadLocal(info.num_opt_params);
    TargetEntryInstr* valid;
    TargetEntryInstr* too_many;
    check_named.current = valid;
    Fragment(too_many) + Goto(info.throw_no_such_method);
  check_named += BuildClosureCallNamedArgumentsCheck(info);
  check_named += Goto(join_after_optional);

  check_entry.current = join_after_optional;
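// Builds a loop over the closure's type parameters that checks the provided
// (or instantiated-to-dynamic) type arguments against the declared bounds
// via AssertSubtype, skipping parameters whose flag bit (the is_covariant
// branch below) allows the bound check to be elided.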
Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
    const ClosureCallInfo& info) {
  loop_init += LoadLocal(info.type_parameters);
  TargetEntryInstr* null_bounds;
  TargetEntryInstr* non_null_bounds;
  loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
  Fragment(null_bounds) + Goto(done);
  loop_init.current = non_null_bounds;
  loop_init += Drop();
  loop_init += Goto(loop);

  Fragment loop_check(loop);
  loop_check += LoadLocal(info.vars->current_param_index);
  loop_check += LoadLocal(info.num_type_parameters);
  TargetEntryInstr* more;
  TargetEntryInstr* no_more;

  Fragment loop_test_flag(more);
  loop_test_flag += LoadLocal(info.type_parameter_flags);
  TargetEntryInstr* null_flags;
  TargetEntryInstr* non_null_flags;
  loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
  loop_test_flag.current = non_null_flags;
  loop_test_flag += LoadLocal(info.type_parameter_flags);
  loop_test_flag += LoadLocal(info.vars->current_param_index);
  loop_test_flag += LoadLocal(info.vars->current_param_index);
  TargetEntryInstr* is_noncovariant;
  TargetEntryInstr* is_covariant;
  loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
  Fragment(is_covariant) + Goto(next);
  Fragment(is_noncovariant) + Goto(check);

  Fragment loop_prep_type_param(check);
  loop_prep_type_param += LoadLocal(info.vars->function_type_args);
  TargetEntryInstr* null_ftav;
  TargetEntryInstr* non_null_ftav;
  loop_prep_type_param += BranchIfNull(&null_ftav, &non_null_ftav);
  Fragment(null_ftav) + Goto(dynamic_type_param);
  loop_prep_type_param.current = non_null_ftav;
  loop_prep_type_param += LoadLocal(info.vars->function_type_args);
  loop_prep_type_param += LoadLocal(info.vars->current_param_index);
  loop_prep_type_param += LoadLocal(info.num_parent_type_args);
  loop_prep_type_param += SmiBinaryOp(Token::kADD, true);
  loop_prep_type_param += StoreLocal(info.vars->current_type_param);
  loop_prep_type_param += Drop();
  loop_prep_type_param += Goto(call);

  Fragment loop_dynamic_type_param(dynamic_type_param);
  loop_dynamic_type_param += StoreLocal(info.vars->current_type_param);
  loop_dynamic_type_param += Drop();
  loop_dynamic_type_param += Goto(call);

  Fragment loop_call_check(call);
  loop_call_check += LoadLocal(info.instantiator_type_args);
  loop_call_check += LoadLocal(info.vars->function_type_args);
  loop_call_check += LoadLocal(info.vars->current_type_param);
  loop_call_check += LoadLocal(info.type_parameters);
  loop_call_check += LoadLocal(info.vars->current_param_index);
  loop_call_check += LoadLocal(info.type_parameters);
  loop_call_check += LoadLocal(info.vars->current_param_index);
  loop_call_check += AssertSubtype(TokenPosition::kNoSource);

  Fragment loop_incr(next);
  loop_incr += LoadLocal(info.vars->current_param_index);
  loop_incr += Drop();
  loop_incr += Goto(loop);

  return Fragment(loop_init.entry, done);
Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
    const ClosureCallInfo& info,
    LocalVariable* param_index,
    const String& arg_name) {
  Fragment instructions;
  instructions += LoadLocal(info.parameter_types);
  instructions += LoadLocal(param_index);
  instructions += LoadLocal(info.instantiator_type_args);
  instructions += LoadLocal(info.vars->function_type_args);
                                  AssertAssignableInstr::kParameterCheck);
  instructions += Drop();
  return instructions;

Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
    const ClosureCallInfo& info) {
  Fragment instructions;
  for (intptr_t i = 1; i < info.descriptor.PositionalCount(); i++) {
    instructions += BuildClosureCallArgumentTypeCheck(
        info, param_index, i, Symbols::dynamic_assert_assignable_stc_check());
  for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
    const intptr_t arg_index = info.descriptor.PositionAt(i);
    auto const param_index = info.vars->named_argument_parameter_indices.At(i);
    instructions += BuildClosureCallArgumentTypeCheck(
        info, param_index, arg_index,
        Symbols::dynamic_assert_assignable_stc_check());
  return instructions;
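// Entry point for the dynamic closure call checks: extracts parameter and
// type-parameter counts from the signature's packed bit fields, validates
// the argument shape, handles default or prepended type arguments for
// generic closures, and, depending on FLAG_eliminate_type_checks, performs
// the type-argument and argument type checks built by the helpers above.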
Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
  body += LoadLocal(info.closure);
  body += LoadLocal(info.signature);
  body += BuildExtractUnboxedSlotBitFieldIntoSmi<
      Slot::FunctionType_packed_parameter_counts());
  body += LoadLocal(info.signature);
  body += BuildExtractUnboxedSlotBitFieldIntoSmi<
      Slot::FunctionType_packed_parameter_counts());
  body += LoadLocal(info.num_fixed_params);
  body += LoadLocal(info.num_opt_params);
  body += LoadLocal(info.signature);
  body += BuildExtractUnboxedSlotBitFieldIntoSmi<
      Slot::FunctionType_packed_parameter_counts());
  body += LoadLocal(info.signature);
  body += LoadLocal(info.signature);
  body += LoadLocal(info.signature);
  body += LoadLocal(info.closure);
  body += LoadLocal(info.closure);

  body += BuildClosureCallArgumentsValidCheck(info);

  Fragment not_generic;
  not_generic += LoadLocal(info.parent_function_type_args);
  not_generic += Drop();

  generic += LoadLocal(info.signature);
  generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
      Slot::FunctionType_packed_type_parameter_counts());
  generic += LoadLocal(info.signature);
  generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
      Slot::FunctionType_packed_type_parameter_counts());
  generic += LoadLocal(info.type_parameters);
  generic += BuildClosureCallDefaultTypeHandling(info);
  generic += LoadLocal(info.vars->function_type_args);
  generic += LoadLocal(info.parent_function_type_args);
  generic += LoadLocal(info.num_parent_type_args);
  generic += LoadLocal(info.num_parent_type_args);
  generic += LoadLocal(info.num_type_parameters);
  generic += StaticCall(TokenPosition::kNoSource,
                        PrependTypeArgumentsFunction(), 4, ICData::kStatic);

  if (FLAG_eliminate_type_checks) {
  body += TestClosureFunctionGeneric(info, generic, not_generic);
  body += BuildClosureCallArgumentTypeChecks(info);
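// Builds the invoke-field dispatcher. For closure receivers it appears to
// run the dynamic closure call checks above and then a ClosureCall; for
// other receivers it first fetches the field's value with a one-argument
// instance call and invokes the result with an instance call, forwarding
// the original type arguments, positional arguments, and argument names.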
FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
  const bool is_dynamic_call =
  if (is_dynamic_call) {
  const Class& closure_class =
  const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&

  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);

  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(function.token_pos());

  LocalVariable* closure = nullptr;
  if (is_closure_call) {
    if (is_dynamic_call) {
          "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
      body += BuildDynamicClosureCallChecks(closure);

  if (descriptor.TypeArgsLen() > 0) {
    ASSERT(type_args != nullptr);
    body += LoadLocal(type_args);

  if (is_closure_call) {
    const intptr_t kTypeArgsLen = 0;
    const intptr_t kNumArgsChecked = 1;
                         kTypeArgsLen, 1, Array::null_array(), kNumArgsChecked);

  for (intptr_t pos = 1; pos < descriptor.Count(); pos++) {

  const Array* argument_names = &Object::null_array();
  if (descriptor.NamedCount() > 0) {
    const auto& array_handle =
    for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
      const intptr_t named_arg_index =
          descriptor.PositionAt(i) - descriptor.PositionalCount();
      string_handle = descriptor.NameAt(i);
      array_handle.SetAt(named_arg_index, string_handle);
    argument_names = &array_handle;

  if (is_closure_call) {
    if (!FLAG_precompiled_mode) {
    body += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
                        descriptor.TypeArgsLen(), descriptor.Count(),
    const intptr_t kNumArgsChecked = 1;
        Token::kILLEGAL, descriptor.TypeArgsLen(),
        descriptor.Count(), *argument_names, kNumArgsChecked);

  body += Return(TokenPosition::kNoSource);
FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
    bool is_implicit_closure_function,
    bool throw_no_such_method_error) {
  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);

  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(function.token_pos());

  if (is_implicit_closure_function && !function.is_static()) {
  if (function.NeedsTypeArgumentTypeChecks()) {
  if (function.NeedsArgumentTypeChecks()) {
    BuildArgumentTypeChecks(&body, &body, nullptr);

  body += StoreLocal(TokenPosition::kNoSource, argument_count_var);
  otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
  otherwise += Drop();
  if (function.HasOptionalParameters()) {
    body += LoadLocal(argument_count_var);
  body += StoreLocal(TokenPosition::kNoSource, index);
  store += LoadLocal(arguments);
  store += LoadFunctionTypeArguments();

  TargetEntryInstr* body_entry;
  TargetEntryInstr* loop_exit;
  condition += LoadLocal(index);
  condition += BranchIfTrue(&body_entry, &loop_exit, false);

  Fragment loop_body(body_entry);
  loop_body += LoadLocal(arguments);
  loop_body += LoadLocal(index);
  loop_body += LoadLocal(index);
  loop_body += LoadLocal(index);
  loop_body += StoreLocal(TokenPosition::kNoSource, index);
  loop_body += Drop();

  Fragment loop(join);
  Instruction* entry =
  body += Fragment(entry, loop_exit);

  if (is_implicit_closure_function) {
    if (throw_no_such_method_error) {
      const Function& parent =
  body += LoadLocal(arguments);
  if (throw_no_such_method_error) {
    const Function& parent =
  } else if (function.IsImplicitSetterFunction() ||
  if (function.IsClosureFunction()) {
  then += StoreLocal(TokenPosition::kNoSource, argument_count_var);
  otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
  otherwise += Drop();
  body += LoadLocal(argument_count_var);

  const Class& mirror_class =
  ASSERT(!mirror_class.IsNull());
  const auto& error = mirror_class.EnsureIsFinalized(H.thread());
      Symbols::AllocateInvocationMirrorForClosure())));
  ASSERT(!allocation_function.IsNull());
                     5, ICData::kStatic);

  if (throw_no_such_method_error) {
    const auto& error = klass.EnsureIsFinalized(H.thread());
        klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNewInvocation()));
    ASSERT(!throw_function.IsNull());
    body += StaticCall(TokenPosition::kNoSource, throw_function, 2,
    body += InstanceCall(
        TokenPosition::kNoSource, Symbols::NoSuchMethod(), Token::kILLEGAL,
        0, 2, Array::null_array(),

  if (!return_type.IsTopTypeForSubtyping()) {
    body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
  body += Return(TokenPosition::kNoSource);
Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
  Fragment keep_same, use_defaults;
  if (!function.IsGeneric()) return keep_same;
  const auto& default_types =
  if (default_types.IsNull()) return keep_same;
  if (function.IsClosureFunction()) {
    auto const mode = function.default_type_arguments_instantiation_mode();
    use_defaults += Constant(default_types);
    use_defaults += LoadLocal(closure);
    use_defaults += LoadLocal(closure);
    use_defaults += LoadLocal(closure);
    if (!default_types.IsInstantiated(kFunctions)) {
      use_defaults += LoadLocal(closure);
    use_defaults += TranslateInstantiatedTypeArguments(default_types);
  use_defaults += Drop();

FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
    Fragment shared_prologue_linked_in,
    Fragment skippable_checks,
    Fragment redefinitions_if_skipped,
  Instruction* prologue_start = shared_prologue_linked_in.entry->next();

  Fragment normal_entry(shared_prologue_linked_in.entry);
  normal_entry += StoreLocal(TokenPosition::kNoSource,
  normal_entry += Drop();
  normal_entry += Goto(join_entry);

  Fragment extra_entry(extra_target_entry);
  extra_entry += StoreLocal(TokenPosition::kNoSource,
  extra_entry += Drop();
  extra_entry += Goto(join_entry);

  if (prologue_start != nullptr) {
    join_entry->LinkTo(prologue_start);
  shared_prologue_linked_in.current = join_entry;

  TargetEntryInstr* do_checks;
  TargetEntryInstr* skip_checks;
  shared_prologue_linked_in +=
  shared_prologue_linked_in +=
  shared_prologue_linked_in +=
  Fragment(do_checks) + skippable_checks + Goto(rest_entry);
  Fragment(skip_checks) + redefinitions_if_skipped + Goto(rest_entry);
  Fragment(rest_entry) + body;
  return extra_target_entry;

FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
    BlockEntryInstr* normal_entry,
    Fragment normal_prologue,
    Fragment extra_prologue,
    Fragment shared_prologue,
  Fragment normal(normal_entry);
  normal += normal_prologue;
  Fragment extra(extra_entry);
  extra += extra_prologue;
  extra += Goto(join_entry);
  Fragment(join_entry) + shared_prologue + body;
FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
      (parent.num_fixed_parameters() != target.num_fixed_parameters())) {
    return BuildGraphOfNoSuchMethodForwarder(function, true,
                                             parent.is_static());

  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);

  Fragment closure(instruction_cursor);
  intptr_t type_args_len = 0;
  if (target.IsConstructor()) {
    ASSERT(result_type.IsFinalized());
        Z, Type::Cast(result_type).GetInstanceTypeArguments(H.thread()));
        TranslateInstantiatedTypeArguments(instantiated_type_arguments);
    type_args_len = function.NumTypeParameters();
  } else if (target.IsFactory()) {

  if (target.IsGenerativeConstructor()) {
    if (cls.NumTypeArguments() > 0) {
          Z, cls.GetDeclarationInstanceTypeArguments()));
    closure += LoadLocal(receiver);
  } else if (!target.is_static()) {
      target.NumImplicitParameters();

  Array& argument_names =
                        argument_names, ICData::kNoRebind,
                        nullptr, type_args_len);
  if (target.IsGenerativeConstructor()) {

FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
      function.IsDynamicInvocationForwarder());
  const bool is_method = !function.IsStaticFunction();
  const bool is_setter = target.IsImplicitSetterFunction();
  const bool is_getter = target.IsImplicitGetterFunction() ||
                         target.IsImplicitStaticGetterFunction();
  ASSERT(is_setter || is_getter);

  Fragment body(normal_entry);
  auto const setter_value =
  body += LoadLocal(setter_value);
  const bool needs_type_check = function.IsDynamicInvocationForwarder() ||
                                setter_value->needs_type_check();
  if (needs_type_check) {
    body += CheckAssignable(setter_value->static_type(), setter_value->name(),
                            AssertAssignableInstr::kParameterCheck,
  if (field.is_late()) {
    body += StoreLateField(
        field, field.NeedsInitializationCheckOnLoad());
  } else if (field.is_const()) {
    if (value.IsError()) {
    ASSERT(field.has_nontrivial_initializer() ||
           (field.is_late() && !field.has_initializer()));
  if (is_method || !field.is_const()) {
    Fragment load_guard = CheckAssignable(
    if (field.needs_load_guard()) {
  body += Return(TokenPosition::kNoSource);

  PrologueInfo prologue_info(-1, -1);
FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
  if (target.IsImplicitSetterFunction() || target.IsImplicitGetterFunction()) {
    return BuildGraphOfFieldAccessor(function);
  if (target.IsMethodExtractor()) {
    return BuildGraphOfMethodExtractor(target);
    return BuildGraphOfRecognizedMethod(function);

  graph_entry_ = new (Z) GraphEntryInstr(*parsed_function_, osr_id_);
  PrologueInfo prologue_info(-1, -1);
  auto instruction_cursor = BuildPrologue(normal_entry, &prologue_info);
  body += CheckStackOverflowInPrologue(function.token_pos());
  body += BuildDefaultTypeHandling(function);
  BuildTypeArgumentTypeChecks(
  BuildArgumentTypeChecks(&body, &body, nullptr);

  intptr_t type_args_len = 0;
  type_args_len = function.NumTypeParameters();
  const auto& argument_names =
                     argument_names, ICData::kNoRebind, nullptr, type_args_len);
  if (target.has_unboxed_integer_return()) {
    body += Box(kUnboxedInt64);
  } else if (target.has_unboxed_double_return()) {
    body += Box(kUnboxedDouble);
  } else if (target.has_unboxed_record_return()) {
  if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
  body += Return(TokenPosition::kNoSource);
  instruction_cursor->LinkTo(body.entry);

void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
    const Fragment& fragment,
  ASSERT(fragment.current->IsDefinition());
  fragment.current->AsDefinition()->set_range(range);

  switch (unboxed_representation) {
      return kTypedDataFloat32ArrayCid;
      return kTypedDataInt32ArrayCid;
    case kUnboxedUint32:
      return kTypedDataUint32ArrayCid;
      return kTypedDataInt64ArrayCid;
    case kUnboxedDouble:
      return kTypedDataFloat64ArrayCid;
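// Typed-data store/load helpers used by the FFI lowering below. Note that
// unboxed float values are routed through an int32 bit-cast, since the
// indexed store and load paths here handle only integer and double
// representations directly.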
Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
    intptr_t index_scale,
    bool index_unboxed) {
  ASSERT(unboxed_representation == kUnboxedInt32 ||
         unboxed_representation == kUnboxedUint32 ||
         unboxed_representation == kUnboxedInt64 ||
         unboxed_representation == kUnboxedFloat ||
         unboxed_representation == kUnboxedDouble);
  if (unboxed_representation == kUnboxedFloat) {
    fragment += BitCast(kUnboxedFloat, kUnboxedInt32);
    unboxed_representation = kUnboxedInt32;
                                      index_scale, index_unboxed);

Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
    intptr_t index_scale,
    bool index_unboxed) {
  ASSERT(unboxed_representation == kUnboxedInt32 ||
         unboxed_representation == kUnboxedUint32 ||
         unboxed_representation == kUnboxedInt64 ||
         unboxed_representation == kUnboxedFloat ||
         unboxed_representation == kUnboxedDouble);
  if (unboxed_representation == kUnboxedFloat) {
    representation_for_load = kUnboxedInt32;
                                     index_scale, index_unboxed);
  if (unboxed_representation == kUnboxedFloat) {
    fragment += BitCast(kUnboxedInt32, kUnboxedFloat);

Fragment FlowGraphBuilder::UnhandledException() {
  ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
  body += LoadLocal(error_instance);
  body += LoadLocal(CurrentException());
  body += LoadLocal(error_instance);
  body += LoadLocal(CurrentStackTrace());

  auto const unbox_to = to == kUnboxedFloat ? kUnboxedDouble : to;
  Fragment instructions;
  instructions <<= unbox;
  if (to == kUnboxedFloat) {
  return instructions;

Fragment FlowGraphBuilder::LoadThread() {
  LoadThreadInstr* instr = new (Z) LoadThreadInstr();
  return Fragment(instr);

Fragment FlowGraphBuilder::LoadIsolate() {
  body += LoadThread();

Fragment FlowGraphBuilder::LoadIsolateGroup() {
  body += LoadThread();

Fragment FlowGraphBuilder::LoadObjectStore() {
  body += LoadIsolateGroup();

Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
  body += LoadThread();

Fragment FlowGraphBuilder::BoolToInt() {
  Fragment instructions;
  TargetEntryInstr* is_true;
  TargetEntryInstr* is_false;

  Fragment store_1(is_true);
  store_1 += StoreLocal(TokenPosition::kNoSource, expression_temp);

  Fragment store_0(is_false);
  store_0 += StoreLocal(TokenPosition::kNoSource, expression_temp);

  instructions = Fragment(instructions.entry, join);
  instructions += LoadLocal(expression_temp);
  return instructions;
Fragment FlowGraphBuilder::IntToBool() {

Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
    RelationalOpInstr* instr = new (Z) RelationalOpInstr(
        InstructionSource(position), kind, left, right, kMintCid,
        GetNextDeoptId(), Instruction::SpeculativeMode::kNotSpeculative);
    return Fragment(instr);
  const String* name = nullptr;
      name = &Symbols::LessEqualOperator();
      name = &Symbols::GreaterEqualOperator();
  return InstanceCall(
      position, *name, kind, 0, 2,
      Array::null_array(), 2);

Fragment FlowGraphBuilder::NativeReturn(
    const compiler::ffi::CallbackMarshaller& marshaller) {
  const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
  if (num_return_defs == 1) {
    auto* instr = new (Z) NativeReturnInstr(Pop(), marshaller);
    return Fragment(instr).closed();
  auto* typed_data_base = Pop();
  auto* instr = new (Z) NativeReturnInstr(typed_data_base, offset, marshaller);
  return Fragment(instr).closed();

  BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
  return Fragment(instr);

Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
  Call1ArgStubInstr* instr = new (Z) Call1ArgStubInstr(
  return Fragment(instr);

Fragment FlowGraphBuilder::Suspend(TokenPosition position,
  SuspendInstr* instr =
      new (Z) SuspendInstr(InstructionSource(position), stub_id, operand,
  return Fragment(instr);

Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
    const AbstractType& compound_type) {
  const auto& compound_sub_class =
  compound_sub_class.EnsureIsFinalized(thread_);
  LocalVariable* typed_data = MakeTemporary("typed_data_base");
  body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
  body += LoadLocal(compound);
  body += LoadLocal(typed_data);
  body += LoadLocal(compound);
Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {

Fragment FlowGraphBuilder::LoadOffsetInBytesFromCompound() {

Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
    ZoneGrowableArray<LocalVariable*>* definitions,
    const GrowableArray<Representation>& representations) {
  const intptr_t num_defs = representations.length();
  ASSERT(definitions->length() == num_defs);

  int offset_in_bytes = 0;
  for (intptr_t i = 0; i < num_defs; i++) {
    body += LoadLocal(uint8_list);
    body += LoadLocal(definitions->At(i));
    body += StoreIndexedTypedDataUnboxed(representation, 1,

  if (bytes_left >= 4) {
  if (bytes_left >= 2) {

      return kTypedDataInt64ArrayCid;
      return kTypedDataInt32ArrayCid;
      return kTypedDataInt16ArrayCid;
      return kTypedDataInt8ArrayCid;

      return kExternalTypedDataInt64ArrayCid;
      return kExternalTypedDataInt32ArrayCid;
      return kExternalTypedDataInt16ArrayCid;
      return kExternalTypedDataInt8ArrayCid;

    intptr_t offset_in_bytes,
  body += LoadLocal(variable);
  body += LoadTypedDataBaseFromCompound();
  body += LoadLocal(variable);
  body += LoadOffsetInBytesFromCompound();
  body += LoadIndexedTypedDataUnboxed(representation, 1,
  ASSERT(representation != kUnboxedFloat);
  ASSERT(representation != kUnboxedDouble);

  intptr_t remaining = size;
  auto step = [&](intptr_t part_bytes, intptr_t part_cid) {
    while (remaining >= part_bytes) {
      body += LoadLocal(variable);
      body += LoadTypedDataBaseFromCompound();
      body += LoadLocal(variable);
      body += LoadOffsetInBytesFromCompound();
      offset_in_bytes += part_bytes;
      remaining -= part_bytes;
  step(8, kTypedDataUint64ArrayCid);
  step(4, kTypedDataUint32ArrayCid);
  step(2, kTypedDataUint16ArrayCid);
  step(1, kTypedDataUint8ArrayCid);

  if (from_representation != representation) {
    IntConverterInstr* convert = new IntConverterInstr(
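// Converts a compound (struct or union) argument from its Dart
// representation to the shape the native calling convention expects: split
// across multiple locations (IsMultiple), copied piecewise onto the stack
// area (IsStack), or passed as a pointer to memory (IsPointerToMemory).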
Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
    LocalVariable* variable,
    const compiler::ffi::BaseMarshaller& marshaller,
    intptr_t arg_index) {
  const auto& native_loc = marshaller.Location(arg_index);
  if (native_loc.IsMultiple()) {
    const auto& multiple_loc = native_loc.AsMultiple();
    intptr_t offset_in_bytes = 0;
    for (intptr_t i = 0; i < multiple_loc.locations().length(); i++) {
      const auto& loc = *multiple_loc.locations()[i];
      if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
        representation = loc.container_type().AsRepresentationOverApprox(Z);
        representation = loc.payload_type().AsRepresentationOverApprox(Z);
      intptr_t size = loc.payload_type().SizeInBytes();
      body += LoadTail(variable, size, offset_in_bytes, representation);
      offset_in_bytes += size;
  } else if (native_loc.IsStack()) {
    intptr_t remaining = native_loc.payload_type().SizeInBytes();
    intptr_t offset_in_bytes = 0;
    if (remaining > 0) {
      body += LoadTail(variable, remaining, offset_in_bytes, representation);
    ASSERT(native_loc.IsPointerToMemory());
    body += LoadLocal(variable);
    body += LoadTypedDataBaseFromCompound();
    body += LoadLocal(variable);
    body += LoadOffsetInBytesFromCompound();

Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
    const compiler::ffi::BaseMarshaller& marshaller,
    intptr_t arg_index) {
  const auto& compound_type =
  body += WrapTypedDataBaseInCompound(compound_type);
Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
    const compiler::ffi::BaseMarshaller& marshaller,
    ZoneGrowableArray<LocalVariable*>* definitions) {
  const intptr_t length_in_bytes =
      marshaller.Location(arg_index).payload_type().SizeInBytes();
  if (marshaller.Location(arg_index).IsMultiple()) {
    const auto& multiple_loc = marshaller.Location(arg_index).AsMultiple();
    const intptr_t num_defs = multiple_loc.locations().length();
    intptr_t offset_in_bytes = 0;
    for (intptr_t i = 0; i < num_defs; i++) {
      const auto& loc = *multiple_loc.locations()[i];
      if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
        representation = loc.container_type().AsRepresentationOverApprox(Z);
        representation = loc.payload_type().AsRepresentationOverApprox(Z);
      body += LoadLocal(uint8_list);
      body += LoadLocal(definitions->At(i));
      body += StoreIndexedTypedDataUnboxed(representation, 1,
      offset_in_bytes += loc.payload_type().SizeInBytes();
  } else if (marshaller.Location(arg_index).IsStack()) {
    GrowableArray<Representation> representations;
    marshaller.RepsInFfiCall(arg_index, &representations);
    body += PopFromStackToTypedDataBase(definitions, representations);
    ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
    LocalVariable* address_of_compound = MakeTemporary("address_of_compound");
    LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
    intptr_t offset_in_bytes = 0;
    while (offset_in_bytes < length_in_bytes) {
      const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
      const intptr_t chunk_sizee = chunk_size(bytes_left);
      body += LoadLocal(address_of_compound);
      body += LoadLocal(typed_data_base);
      body += LoadLocal(chunk_value);
      offset_in_bytes += chunk_sizee;
    ASSERT(offset_in_bytes == length_in_bytes);

  const auto& compound_type =
  body += WrapTypedDataBaseInCompound(compound_type);

Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
    const compiler::ffi::CallbackMarshaller& marshaller,
    intptr_t arg_index) {
  const auto& native_loc = marshaller.Location(arg_index);
  if (native_loc.IsMultiple()) {
    body += LoadLocal(compound);
    body += LoadOffsetInBytesFromCompound();
    body += LoadTypedDataBaseFromCompound();
    ASSERT(native_loc.IsPointerToMemory());
    const intptr_t length_in_bytes =
        marshaller.Location(arg_index).payload_type().SizeInBytes();
    body += LoadLocal(compound);
    body += LoadTypedDataBaseFromCompound();
    LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
    body += LoadLocal(compound);
    body += LoadOffsetInBytesFromCompound();
    auto* pointer_to_return =
    Push(pointer_to_return);
    body <<= pointer_to_return;
    LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
    intptr_t offset_in_bytes = 0;
    while (offset_in_bytes < length_in_bytes) {
      const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
      const intptr_t chunk_sizee = chunk_size(bytes_left);
      body += LoadLocal(typed_data_base);
      body += LoadLocal(offset);
      body += LoadLocal(unboxed_address);
      body += LoadLocal(chunk_value);
      offset_in_bytes += chunk_sizee;
    ASSERT(offset_in_bytes == length_in_bytes);
Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
    const compiler::ffi::BaseMarshaller& marshaller,
    intptr_t arg_index) {
  ASSERT(!marshaller.IsCompoundCType(arg_index));
  if (marshaller.IsPointerPointer(arg_index)) {
    Class& result_class =
    result_class.EnsureIsFinalized(thread_);
    TypeArguments& args =
    body += AllocateObject(TokenPosition::kNoSource, result_class, 1);
    body += LoadLocal(address);
    body += LoadLocal(result);
  } else if (marshaller.IsTypedDataPointer(arg_index)) {
  } else if (marshaller.IsCompoundPointer(arg_index)) {
  } else if (marshaller.IsHandleCType(arg_index)) {
  } else if (marshaller.IsVoid(arg_index)) {
    if (marshaller.RequiresBitCast(arg_index)) {
          marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)),
          marshaller.RepInDart(arg_index));
    body += Box(marshaller.RepInDart(arg_index));
    if (marshaller.IsBool(arg_index)) {
      body += IntToBool();

Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
    const compiler::ffi::BaseMarshaller& marshaller,
    LocalVariable* variable) {
  ASSERT(!marshaller.IsCompoundCType(arg_index));
  if (marshaller.IsPointerPointer(arg_index)) {
  } else if (marshaller.IsTypedDataPointer(arg_index)) {
  } else if (marshaller.IsCompoundPointer(arg_index)) {
    ASSERT(variable != nullptr);
    body += LoadTypedDataBaseFromCompound();
    body += LoadLocal(variable);
    body += LoadOffsetInBytesFromCompound();
  } else if (marshaller.IsHandleCType(arg_index)) {
    auto* const arg_reps =
        new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
    body += LoadThread();
    arg_reps->Add(kUntagged);
        CallLeafRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
    body += LoadLocal(handle);
    body += LoadLocal(object);
  } else if (marshaller.IsVoid(arg_index)) {
    if (marshaller.IsBool(arg_index)) {
      body += BoolToInt();
    body += UnboxTruncate(marshaller.RepInDart(arg_index));
    if (marshaller.RequiresBitCast(arg_index)) {
          marshaller.RepInDart(arg_index),
          marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)));

FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
  switch (function.GetFfiCallbackKind()) {
      return BuildGraphOfSyncFfiCallback(function);
      return BuildGraphOfAsyncFfiCallback(function);
Fragment FlowGraphBuilder::FfiNativeLookupAddress(
          .Equals(Symbols::FfiNative()));
  const auto& native_class_fields = Array::Handle(Z, native_class.fields());
  ASSERT(native_class_fields.Length() == 4);
  const auto& symbol_field =
  ASSERT(!symbol_field.is_static());
  const auto& asset_id_field =
  ASSERT(!asset_id_field.is_static());
  const auto& symbol =
  const auto& asset_id =
  ASSERT(type_args.Length() == 1);
  if (native_type.IsFunctionType()) {
    const auto& native_function_type = FunctionType::Cast(native_type);
    arg_n = native_function_type.NumParameters() -
            native_function_type.num_implicit_parameters();

  const auto& ffi_resolver =
#if !defined(TARGET_ARCH_IA32)
      CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
                             Array::null_array(),
  char* error = nullptr;
#if !defined(DART_PRECOMPILER) || defined(TESTING)
  const uintptr_t function_address =
  const uintptr_t function_address = 0;
  if (error == nullptr) {
  body += StaticCall(TokenPosition::kNoSource, ffi_resolver, 3,
                     ICData::kStatic);

Fragment FlowGraphBuilder::FfiNativeFunctionBody(const Function& function) {
  const auto& c_signature =
  auto const& native_instance =
  body += FfiNativeLookupAddress(native_instance);
  body += FfiCallFunctionBody(function, c_signature,
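// Shared body for FFI calls (invoked from FfiNativeFunctionBody above): when
// the signature involves handles it wraps the call in a handle scope and a
// try/catch, converts each argument to its native representation, emits the
// FfiCall, and converts the result back to a Dart value, allocating a typed
// data buffer first when the call returns a compound by value.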
Fragment FlowGraphBuilder::FfiCallFunctionBody(
    const FunctionType& c_signature,
    intptr_t first_argument_parameter_offset) {
  const char* error = nullptr;
      Z, function, first_argument_parameter_offset, c_signature, &error);
  const auto& marshaller = *marshaller_ptr;

  const bool signature_contains_handles = marshaller.ContainsHandles();
  const intptr_t num_args = marshaller.num_args();
  for (intptr_t i = 0; i < num_args; i++) {
    if (marshaller.IsHandleCType(i)) {
          first_argument_parameter_offset + i));
          Z, function.ParameterNameAt(first_argument_parameter_offset + i)),
          first_argument_parameter_offset + i));

  intptr_t try_handler_index = -1;
  if (signature_contains_handles) {
    body += TryCatch(try_handler_index);
    auto* const arg_reps =
        new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
    body += LoadThread();
    arg_reps->Add(kUntagged);
    body += CallLeafRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged,

  LocalVariable* return_compound_typed_data = nullptr;
  if (marshaller.ReturnsCompound()) {
    body += IntConstant(marshaller.CompoundReturnSizeInBytes());

  for (intptr_t i = 0; i < marshaller.num_args(); i++) {
    if (marshaller.IsCompoundCType(i)) {
      body += FfiCallConvertCompoundArgumentToNative(
          first_argument_parameter_offset + i));
      if (!marshaller.IsHandleCType(i)) {
        body += FfiConvertPrimitiveToNative(
            first_argument_parameter_offset + i));

  body += LoadLocal(address);
  if (marshaller.ReturnsCompound()) {
    body += LoadLocal(return_compound_typed_data);
  body += FfiCall(marshaller, function.FfiIsLeaf());

  const intptr_t num_defs = marshaller.NumReturnDefinitions();
  auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
  if (marshaller.ReturnsCompound()) {
    body += FfiCallConvertCompoundReturnToDart(marshaller,

  auto exit_handle_scope = [&]() -> Fragment {
    auto* const arg_reps =
        new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
    code += LoadThread();
    arg_reps->Add(kUntagged);
    code += CallLeafRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged,

  if (signature_contains_handles) {
    body += exit_handle_scope();
  body += Return(TokenPosition::kNoSource);

  if (signature_contains_handles) {
    Fragment catch_body =
        CatchBlockEntry(Array::empty_array(), try_handler_index,
    catch_body += exit_handle_scope();
    catch_body += LoadLocal(CurrentException());
    catch_body += LoadLocal(CurrentStackTrace());
    catch_body += RethrowException(TokenPosition::kNoSource, try_handler_index);
Fragment FlowGraphBuilder::LoadNativeArg(
    const compiler::ffi::CallbackMarshaller& marshaller,
    intptr_t arg_index) {
  const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
  auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
  for (intptr_t j = 0; j < num_defs; j++) {
    const intptr_t def_index = marshaller.DefinitionIndex(j, arg_index);
    auto* parameter = new (Z) NativeParameterInstr(marshaller, def_index);
    fragment <<= parameter;
  if (marshaller.IsCompoundCType(arg_index)) {
        FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, defs);
    fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);

FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
  const char* error = nullptr;
  const auto marshaller_ptr =
  const auto& marshaller = *marshaller_ptr;
  const bool is_closure = function.GetFfiCallbackKind() ==
  auto* const native_entry =

  Fragment function_body(native_entry);
  function_body += CheckStackOverflowInPrologue(function.token_pos());

  Fragment body = TryCatch(try_handler_index);
  LocalVariable* closure = nullptr;
  body += LoadThread();
  for (intptr_t i = 0; i < marshaller.num_args(); i++) {
    body += LoadNativeArg(marshaller, i);
  if (!FLAG_precompiled_mode) {
        ClosureCall(Function::null_function(), TokenPosition::kNoSource,
    body += StaticCall(TokenPosition::kNoSource,
                       marshaller.num_args(), Array::empty_array(),
    body += FfiCallbackConvertCompoundReturnToNative(
  body += NativeReturn(marshaller);
  function_body += body;

  Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
  catch_body += UnhandledException();
  const intptr_t size =
  catch_body += WrapTypedDataBaseInCompound(
  catch_body += FfiCallbackConvertCompoundReturnToNative(
  catch_body += NativeReturn(marshaller);

  PrologueInfo prologue_info(-1, -1);

FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
  const char* error = nullptr;
  const auto marshaller_ptr =
  const auto& marshaller = *marshaller_ptr;
  auto* const native_entry =

  Fragment function_body(native_entry);
  function_body += CheckStackOverflowInPrologue(function.token_pos());

  Fragment body = TryCatch(try_handler_index);
  for (intptr_t i = 0; i < marshaller.num_args(); i++) {
    body += LoadLocal(array);
    body += LoadNativeArg(marshaller, i);
  body += Call1ArgStub(TokenPosition::kNoSource,
  body += NativeReturn(marshaller);
  function_body += body;

  Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
  catch_body += NativeReturn(marshaller);

  PrologueInfo prologue_info(-1, -1);
void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
  try_catch_block_ = try_catch_block;
                         : try_catch_block->try_index());

const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
  if (prepend_type_arguments_.IsNull()) {
    prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
        Symbols::PrependTypeArguments());
  return prepend_type_arguments_;

Fragment FlowGraphBuilder::BuildIntegerHashCode(bool smi) {
  HashIntegerOpInstr* hash =

Fragment FlowGraphBuilder::BuildDoubleHashCode() {
  body += Box(kUnboxedInt64);
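// SwitchHelper collects the case expressions of a switch statement and
// decides which dispatch strategy to use. Based on the constants below, a
// jump table is only attempted for at least kJumpTableMinExpressions
// integer or enum cases whose value range fits in kJumpTableMaxSize and
// whose ratio of holes stays within kJumpTableMaxHolesRatio; otherwise the
// switch falls back to comparison-based dispatch.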
                           intptr_t case_count)
      position_(position),
      is_exhaustive_(is_exhaustive),
      expression_type_(expression_type),
      switch_block_(switch_block),
      case_count_(case_count),
      case_bodies_(case_count),
      case_expression_counts_(case_count),
      expressions_(case_count),
      sorted_expressions_(case_count) {
  is_optimizable_ = true;
  is_optimizable_ = true;
  is_enum_switch_ = true;

  const uint64_t diff =
      static_cast<uint64_t>(max) - static_cast<uint64_t>(min);
  if (diff > static_cast<uint64_t>(kMaxInt64 - 1)) {
  return static_cast<int64_t>(diff + 1);

  const intptr_t kJumpTableMinExpressions = 16;
  const intptr_t kJumpTableMaxSize = kMaxInt32;
  const double kJumpTableMaxHolesRatio = 1.0;
  PrepareForOptimizedSwitch();
  if (range > kJumpTableMaxSize) {
  const intptr_t num_expressions = expressions().length();
  ASSERT(num_expressions <= range);
  const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
  const int64_t holes = range - num_expressions;
  if (num_expressions < kJumpTableMinExpressions) {
  if (holes > max_holes) {
                                   kJumpTableMaxSize - range);
  if (required_holes <= holes_budget) {
    expression_min_ = &Object::smi_zero();

void SwitchHelper::PrepareForOptimizedSwitch() {
  const Field* enum_index_field = nullptr;
  for (intptr_t i = 0; i < expressions_.length(); ++i) {
    sorted_expressions_.Add(&expression);
    const Integer* integer = nullptr;
    if (enum_index_field == nullptr) {
    integer = &Integer::Cast(value);
    expression_min_ = integer;
    expression_max_ = integer;
    expression_min_ = integer;
    expression_max_ = integer;

  sorted_expressions_.Sort(
      [](SwitchExpression* const* a, SwitchExpression* const* b) {
        return (*a)->integer().CompareWith((*b)->integer());

  for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
    const SwitchExpression& a = *sorted_expressions_.At(i);
    const SwitchExpression& b = *sorted_expressions_.At(i + 1);
    if (a.integer().Equals(b.integer())) {
      is_optimizable_ = false;

  case_expression_counts_[case_index]++;
  if (is_optimizable_) {
    if (!value.IsInstanceOf(expression_type_, Object::null_type_arguments(),
                            Object::null_type_arguments())) {
      is_optimizable_ = false;
static bool IsExpressionTempVarUsedInRecognizedMethodFlowGraph(const Function &function)
static bool IsRecognizedMethodForFlowGraph(const Function &function)
void Prepend(Instruction *start)
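The Fragment-returning members above compose with +=, each pushing or popping entries on the builder's expression stack. A minimal sketch of that composition, using only signatures listed here (plus Drop(), assumed from the base builder); IncrementLocalSketch and the counter variable are hypothetical and not part of the real FlowGraphBuilder:
// Hypothetical helper: load a local, add 1, store it back, discard the value.
Fragment FlowGraphBuilder::IncrementLocalSketch(LocalVariable* counter) {
  Fragment instructions;
  instructions += LoadLocal(counter);        // push current value
  instructions += IntConstant(1);            // push constant 1
  instructions += SmiBinaryOp(Token::kADD);  // pop both, push the sum
  instructions += StoreLocal(counter);       // store back into the local
  instructions += Drop();                    // discard the stored value
  return instructions;
}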
LocalVariable * type_arguments_variable
IntMap< LocalScope * > scopes
IntMap< LocalVariable * > locals
intptr_t num_ast_nodes() const
const Instance & value() const
void set_integer(const Integer &integer)
const Integer & expression_max() const
const AbstractType & expression_type() const
bool is_optimizable() const
bool is_enum_switch() const
SwitchHelper(Zone *zone, TokenPosition position, bool is_exhaustive, const AbstractType &expression_type, SwitchBlock *switch_block, intptr_t case_count)
void AddExpression(intptr_t case_index, TokenPosition position, const Instance &value)
const GrowableArray< SwitchExpression > & expressions() const
int64_t ExpressionRange() const
intptr_t case_count() const
bool RequiresUpperBoundCheck() const
SwitchDispatch SelectDispatchStrategy()
bool is_exhaustive() const
const Integer & expression_min() const
const TokenPosition & position() const
bool RequiresLowerBoundCheck() const
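A minimal sketch of driving SwitchHelper from the statement translator, assuming Z, position, expression_type, and switch_block come from the surrounding builder state and the switch has three integer cases; the emitted bodies are elided:
SwitchHelper helper(Z, position, /*is_exhaustive=*/false, expression_type,
                    switch_block, /*case_count=*/3);
helper.AddExpression(0, position, Integer::ZoneHandle(Z, Integer::New(0)));
helper.AddExpression(1, position, Integer::ZoneHandle(Z, Integer::New(1)));
helper.AddExpression(2, position, Integer::ZoneHandle(Z, Integer::New(7)));

switch (helper.SelectDispatchStrategy()) {
  case kSwitchDispatchLinearScan:   /* chained equality checks */ break;
  case kSwitchDispatchBinarySearch: /* sorted comparison tree */ break;
  case kSwitchDispatchJumpTable:    /* bounds checks per RequiresLowerBoundCheck /
                                       RequiresUpperBoundCheck, then a table jump */ break;
  default: break;  // e.g. forced via the dispatch-type flag
}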
static Editor::Movement convert(skui::Key key)
static float max(float r, float g, float b)
static float min(float r, float g, float b)
#define LOAD_NATIVE_FIELD(V)
#define CASE(method, slot)
#define STORE_NATIVE_FIELD_NO_BARRIER(V)
#define STORE_NATIVE_FIELD(V)
#define IL_BODY(method, slot)
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
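These list macros follow the X-macro pattern: a list such as LOAD_NATIVE_FIELD(V) invokes V(method, slot) once per recognized method, and a short-lived IL_BODY (or CASE) macro expands each entry into a switch case. A sketch of the shape only, with invented entries and a simplified body that uses a single-argument LoadNativeField overload:
// Illustrative X-macro shape; the entries below are hypothetical.
#define EXAMPLE_LOAD_NATIVE_FIELD(V)                                           \
  V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                 \
  V(TypedDataViewTypedData, TypedDataView_typed_data)

#define IL_BODY(method, slot)                                                  \
  case MethodRecognizer::k##method:                                            \
    body += LoadNativeField(Slot::slot());                                     \
    break;
// ...inside a switch over the recognized method kind:
//   EXAMPLE_LOAD_NATIVE_FIELD(IL_BODY)
#undef IL_BODY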
AlignmentType RecognizedMethodAlignment(MethodRecognizer::Kind kind)
classid_t RecognizedMethodTypeArgCid(MethodRecognizer::Kind kind)
const intptr_t kResultIndex
classid_t ElementExternalTypedDataCid(classid_t class_id)
classid_t ElementTypedDataCid(classid_t class_id)
static constexpr intptr_t kNumParameterFlagsPerElementLog2
@ kRequiredNamedParameterFlag
static constexpr intptr_t kWordSize
static constexpr intptr_t kCompressedWordSize
static constexpr intptr_t kNumParameterFlagsPerElement
const Class & GrowableObjectArrayClass()
@ kCheckCovariantTypeParameterBounds
@ kCheckNonCovariantTypeParameterBounds
@ kCheckAllTypeParameterBounds
static const Function & TypedListSetNativeFunction(Thread *thread, classid_t cid)
static classid_t TypedDataCidUnboxed(Representation unboxed_representation)
@ kSwitchDispatchLinearScan
@ kSwitchDispatchJumpTable
@ kSwitchDispatchBinarySearch
static classid_t external_typed_data_cid(intptr_t chunk_size)
static classid_t typed_data_cid(intptr_t chunk_size)
const Function & TypedListGetNativeFunction(Thread *thread, classid_t cid)
static intptr_t chunk_size(intptr_t bytes_left)
static bool CanUnboxElements(classid_t cid)
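chunk_size, typed_data_cid, and external_typed_data_cid suggest a chunked lowering of bulk byte accesses. A purely illustrative sketch, assuming a running bytes_left count and eliding the emitted load/store fragments:
intptr_t offset = 0;
while (bytes_left > 0) {
  const intptr_t chunk = chunk_size(bytes_left);  // widest chunk that still fits
  const classid_t cid = typed_data_cid(chunk);    // typed data cid with that element size
  // ...emit a LoadIndexed(cid) / StoreIndexed(cid) pair for this chunk...
  offset += chunk;
  bytes_left -= chunk;
}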
constexpr int64_t kMaxInt64
static const char *const names[]
@ kSharesInstantiatorTypeArguments
@ kSharesFunctionTypeArguments
bool IsTypedDataBaseClassId(intptr_t index)
static constexpr Representation kUnboxedUword
@ TypedDataView_offset_in_bytes
@ TypedDataView_typed_data
@ kIsolateLocalClosureCallback
@ kIsolateLocalStaticCallback
@ kUnmodifiableByteDataViewCid
constexpr intptr_t kBitsPerByte
GrowableArray< Value * > InputsArray
bool IsZero(char *begin, char *end)
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
static constexpr Representation kUnboxedAddress
constexpr int32_t kMaxInt32
intptr_t FfiResolveInternal(const String &asset, const String &symbol, uintptr_t args_n, char **error)
constexpr intptr_t kWordSize
static constexpr Representation kUnboxedIntPtr
static constexpr Representation kUnboxedWord
static constexpr intptr_t kInvalidTryIndex
bool IsExternalTypedDataClassId(intptr_t index)
DEF_SWITCHES_START (Flutter shell switch descriptions: Perfetto-loadable trace output, test-font restriction, prefetched default font manager, non-interactive run mode, serialized concurrent GC on low-power devices, per-domain network policy, old gen heap size, AOT vmservice shared library name, VM and isolate snapshot paths, cache directory, ICU data library path, Dart VM Service host)
std::function< void()> closure
static SkString join(const CommandLineFlags::StringArray &)
static constexpr size_t ValueSize(Representation rep)
static Representation RepresentationOfArrayElement(classid_t cid)
LocalVariable * num_fixed_params
LocalVariable * type_parameter_flags
LocalVariable * num_max_params
LocalVariable * type_parameters
ParsedFunction::DynamicClosureCallVars *const vars
LocalVariable * parent_function_type_args
LocalVariable * num_opt_params
ClosureCallInfo(LocalVariable *closure, JoinEntryInstr *throw_no_such_method, const Array &arguments_descriptor_array, ParsedFunction::DynamicClosureCallVars *const vars)
LocalVariable * instantiator_type_args
const ArgumentsDescriptor descriptor
LocalVariable *const closure
LocalVariable * num_type_parameters
JoinEntryInstr *const throw_no_such_method
LocalVariable * signature
LocalVariable * parameter_types
LocalVariable * num_parent_type_args
LocalVariable * has_named_params
LocalVariable * named_parameter_names
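A hedged sketch of populating a ClosureCallInfo inside the builder, assuming closure_var holds the closure being invoked dynamically and that the parsed function exposes its DynamicClosureCallVars through a dynamic_closure_call_vars() accessor (hypothetical spelling):
JoinEntryInstr* nsm = BuildThrowNoSuchMethod();    // shared NoSuchMethod throw target
const Array& args_desc = saved_args_desc_array();  // descriptor of the dynamic call
ClosureCallInfo info(closure_var, nsm, args_desc,
                     parsed_function_->dynamic_closure_call_vars());
// The scratch LocalVariable* fields above (signature, parameter_types, ...) are
// then filled in while emitting the dynamic-invocation prologue.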