Flutter Engine
The Flutter Engine
kernel_to_il.cc File Reference
#include "vm/compiler/frontend/kernel_to_il.h"
#include <utility>
#include "lib/ffi_dynamic_library.h"
#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/backend/locations.h"
#include "vm/compiler/backend/range_analysis.h"
#include "vm/compiler/ffi/abi.h"
#include "vm/compiler/ffi/marshaller.h"
#include "vm/compiler/ffi/native_calling_convention.h"
#include "vm/compiler/ffi/native_location.h"
#include "vm/compiler/ffi/native_type.h"
#include "vm/compiler/ffi/recognized_method.h"
#include "vm/compiler/frontend/kernel_binary_flowgraph.h"
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/compiler/frontend/prologue_builder.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/compiler/runtime_api.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/native_entry.h"
#include "vm/object_store.h"
#include "vm/report.h"
#include "vm/resolver.h"
#include "vm/runtime_entry.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"


Classes

struct  dart::kernel::FlowGraphBuilder::ClosureCallInfo
 

Namespaces

namespace  dart
 
namespace  dart::kernel
 

Macros

#define Z   (zone_)
 
#define H   (translation_helper_)
 
#define T   (type_translator_)
 
#define I   Isolate::Current()
 
#define IG   IsolateGroup::Current()
 
#define LOAD_NATIVE_FIELD(V)
 
#define STORE_NATIVE_FIELD(V)
 
#define STORE_NATIVE_FIELD_NO_BARRIER(V)
 
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
 
#define CASE(method, slot)   case MethodRecognizer::k##method:
 
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
 
#define CASE(name)
 
#define IL_BODY(method, slot)
 
#define IL_BODY(method, slot)
 
#define IL_BODY(method, slot)
 

Functions

 dart::DEFINE_FLAG (bool, print_huge_methods, false, "Print huge methods (less optimized)")
 
 dart::DEFINE_FLAG (int, force_switch_dispatch_type, -1, "Force switch statements to use a particular dispatch type: " "-1=auto, 0=linear scan, 1=binary search, 2=jump table")
 
static bool dart::kernel::CanUnboxElements (classid_t cid)
 
const Function & dart::kernel::TypedListGetNativeFunction (Thread *thread, classid_t cid)
 
static const Function & dart::kernel::TypedListSetNativeFunction (Thread *thread, classid_t cid)
 
static classid_t dart::kernel::TypedDataCidUnboxed (Representation unboxed_representation)
 
static intptr_t dart::kernel::chunk_size (intptr_t bytes_left)
 
static classid_t dart::kernel::typed_data_cid (intptr_t chunk_size)
 
static classid_t dart::kernel::external_typed_data_cid (intptr_t chunk_size)
 

Macro Definition Documentation

◆ CASE [1/2]

#define CASE(method, slot)    case MethodRecognizer::k##method:
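
This two-argument form only emits a case label and deliberately ignores slot; it is the shape substituted into the LOAD_NATIVE_FIELD / STORE_NATIVE_FIELD lists when a switch merely needs to match every recognized accessor. With a hypothetical list entry (method, slot) = (Foo_getValue, Foo_value) it expands to:

case MethodRecognizer::kFoo_getValue:

so a whole accessor list collapses into a run of fall-through labels ahead of a single return true;.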

◆ CASE [2/2]

#define CASE(name)
Value:
case MethodRecognizer::kTypedData_##name##_factory: \
body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
break; \
case MethodRecognizer::kTypedData_##name##View_factory: \
body += BuildTypedDataViewFactoryConstructor(function, \
kTypedData##name##ViewCid); \
break; \
case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
body += BuildTypedDataViewFactoryConstructor( \
function, kUnmodifiableTypedData##name##ViewCid); \
break;
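
As a concrete expansion, instantiating the macro with name = Int8Array yields one case per factory flavor (plain array, view, and unmodifiable view):

case MethodRecognizer::kTypedData_Int8Array_factory:
  body += BuildTypedDataFactoryConstructor(function, kTypedDataInt8ArrayCid);
  break;
case MethodRecognizer::kTypedData_Int8ArrayView_factory:
  body += BuildTypedDataViewFactoryConstructor(function,
                                               kTypedDataInt8ArrayViewCid);
  break;
case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
  body += BuildTypedDataViewFactoryConstructor(
      function, kUnmodifiableTypedDataInt8ArrayViewCid);
  break;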

◆ H

#define H   (translation_helper_)

Definition at line 59 of file kernel_to_il.cc.

◆ I

#define I   Isolate::Current()

Definition at line 61 of file kernel_to_il.cc.

◆ IG

#define IG   IsolateGroup::Current()

Definition at line 62 of file kernel_to_il.cc.
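
Z, H, T, I and IG are terseness shorthands used throughout the flow graph builder: Z names the builder's zone for new (Z) allocations, H the kernel translation helper, T the type translator, and I / IG the current isolate and isolate group. A representative use, taken verbatim from the listing below:

const auto& pointer_class =
    Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());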

◆ IL_BODY [1/3]

#define IL_BODY(method, slot)
Value:
case MethodRecognizer::k##method: \
ASSERT_EQUAL(function.NumParameters(), 1); \
body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
body += LoadNativeField(Slot::slot()); \
break;
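
Instantiated over the LOAD_NATIVE_FIELD list inside BuildGraphOfRecognizedMethod, this first form emits the complete body of a one-parameter native getter: push the receiver, then load the named slot. With a hypothetical entry (method, slot) = (Foo_getValue, Foo_value) it expands to:

case MethodRecognizer::kFoo_getValue:  // hypothetical entry
  ASSERT_EQUAL(function.NumParameters(), 1);
  body += LoadLocal(parsed_function_->RawParameterVariable(0));  // receiver
  body += LoadNativeField(Slot::Foo_value());
  break;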

◆ IL_BODY [2/3]

#define IL_BODY(method, slot)
Value:
case MethodRecognizer::k##method: \
ASSERT_EQUAL(function.NumParameters(), 2); \
body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
body += StoreNativeField(Slot::slot()); \
body += NullConstant(); \
break;

◆ IL_BODY [3/3]

#define IL_BODY(method, slot)
Value:
case MethodRecognizer::k##method: \
ASSERT_EQUAL(function.NumParameters(), 2); \
body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
kNoStoreBarrier); \
body += NullConstant(); \
break;
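
The only difference from the second form is the trailing kNoStoreBarrier: this variant is instantiated over the STORE_NATIVE_FIELD_NO_BARRIER list, for slots whose stored values are known never to need a GC write barrier (for example Smis or untagged values), so the emitted StoreNativeField can skip the barrier entirely.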

◆ LOAD_NATIVE_FIELD

#define LOAD_NATIVE_FIELD(V)

Definition at line 903 of file kernel_to_il.cc.
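
LOAD_NATIVE_FIELD and its STORE_NATIVE_FIELD / STORE_NATIVE_FIELD_NO_BARRIER siblings are X-macro lists: each expands its argument V once per (method, slot) pair, and the CASE and IL_BODY macros documented above are what get substituted for V. Schematically, with hypothetical entries standing in for the real list at line 903:

#define LOAD_NATIVE_FIELD(V)                                                   \
  V(Foo_getValue, Foo_value)  /* hypothetical entry */                         \
  V(Bar_getCount, Bar_count)  /* hypothetical entry */

LOAD_NATIVE_FIELD(CASE)     // emits case labels in the recognizer switch below
LOAD_NATIVE_FIELD(IL_BODY)  // emits getter bodies in BuildGraphOfRecognizedMethod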

958 {
959 const MethodRecognizer::Kind kind = function.recognized_kind();
960
961 switch (kind) {
962#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
963 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
964 FALL_THROUGH; \
965 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
966 FALL_THROUGH; \
967 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
968 FALL_THROUGH;
969 CLASS_LIST_TYPED_DATA(TYPED_DATA_GET_INDEXED_CASES)
970#undef TYPED_DATA_GET_INDEXED_CASES
971 case MethodRecognizer::kObjectArrayGetIndexed:
972 case MethodRecognizer::kGrowableArrayGetIndexed:
973 case MethodRecognizer::kRecord_fieldAt:
974 case MethodRecognizer::kRecord_fieldNames:
975 case MethodRecognizer::kRecord_numFields:
976 case MethodRecognizer::kSuspendState_clone:
977 case MethodRecognizer::kSuspendState_resume:
978 case MethodRecognizer::kTypedList_GetInt8:
979 case MethodRecognizer::kTypedList_SetInt8:
980 case MethodRecognizer::kTypedList_GetUint8:
981 case MethodRecognizer::kTypedList_SetUint8:
982 case MethodRecognizer::kTypedList_GetInt16:
983 case MethodRecognizer::kTypedList_SetInt16:
984 case MethodRecognizer::kTypedList_GetUint16:
985 case MethodRecognizer::kTypedList_SetUint16:
986 case MethodRecognizer::kTypedList_GetInt32:
987 case MethodRecognizer::kTypedList_SetInt32:
988 case MethodRecognizer::kTypedList_GetUint32:
989 case MethodRecognizer::kTypedList_SetUint32:
990 case MethodRecognizer::kTypedList_GetInt64:
991 case MethodRecognizer::kTypedList_SetInt64:
992 case MethodRecognizer::kTypedList_GetUint64:
993 case MethodRecognizer::kTypedList_SetUint64:
994 case MethodRecognizer::kTypedList_GetFloat32:
995 case MethodRecognizer::kTypedList_SetFloat32:
996 case MethodRecognizer::kTypedList_GetFloat64:
997 case MethodRecognizer::kTypedList_SetFloat64:
998 case MethodRecognizer::kTypedList_GetInt32x4:
999 case MethodRecognizer::kTypedList_SetInt32x4:
1000 case MethodRecognizer::kTypedList_GetFloat32x4:
1001 case MethodRecognizer::kTypedList_SetFloat32x4:
1002 case MethodRecognizer::kTypedList_GetFloat64x2:
1003 case MethodRecognizer::kTypedList_SetFloat64x2:
1004 case MethodRecognizer::kTypedData_memMove1:
1005 case MethodRecognizer::kTypedData_memMove2:
1006 case MethodRecognizer::kTypedData_memMove4:
1007 case MethodRecognizer::kTypedData_memMove8:
1008 case MethodRecognizer::kTypedData_memMove16:
1009 case MethodRecognizer::kTypedData_ByteDataView_factory:
1010 case MethodRecognizer::kTypedData_Int8ArrayView_factory:
1011 case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
1012 case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
1013 case MethodRecognizer::kTypedData_Int16ArrayView_factory:
1014 case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
1015 case MethodRecognizer::kTypedData_Int32ArrayView_factory:
1016 case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
1017 case MethodRecognizer::kTypedData_Int64ArrayView_factory:
1018 case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
1019 case MethodRecognizer::kTypedData_Float32ArrayView_factory:
1020 case MethodRecognizer::kTypedData_Float64ArrayView_factory:
1021 case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
1022 case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
1023 case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
1024 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1025 case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
1026 case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
1027 case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
1028 case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
1029 case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
1030 case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
1031 case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
1032 case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
1033 case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
1034 case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
1035 case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
1036 case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
1037 case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
1038 case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
1039 case MethodRecognizer::kTypedData_Int8Array_factory:
1040 case MethodRecognizer::kTypedData_Uint8Array_factory:
1041 case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
1042 case MethodRecognizer::kTypedData_Int16Array_factory:
1043 case MethodRecognizer::kTypedData_Uint16Array_factory:
1044 case MethodRecognizer::kTypedData_Int32Array_factory:
1045 case MethodRecognizer::kTypedData_Uint32Array_factory:
1046 case MethodRecognizer::kTypedData_Int64Array_factory:
1047 case MethodRecognizer::kTypedData_Uint64Array_factory:
1048 case MethodRecognizer::kTypedData_Float32Array_factory:
1049 case MethodRecognizer::kTypedData_Float64Array_factory:
1050 case MethodRecognizer::kTypedData_Float32x4Array_factory:
1051 case MethodRecognizer::kTypedData_Int32x4Array_factory:
1052 case MethodRecognizer::kTypedData_Float64x2Array_factory:
1053 case MethodRecognizer::kMemCopy:
1054 case MethodRecognizer::kFfiLoadInt8:
1055 case MethodRecognizer::kFfiLoadInt16:
1056 case MethodRecognizer::kFfiLoadInt32:
1057 case MethodRecognizer::kFfiLoadInt64:
1058 case MethodRecognizer::kFfiLoadUint8:
1059 case MethodRecognizer::kFfiLoadUint16:
1060 case MethodRecognizer::kFfiLoadUint32:
1061 case MethodRecognizer::kFfiLoadUint64:
1062 case MethodRecognizer::kFfiLoadFloat:
1063 case MethodRecognizer::kFfiLoadFloatUnaligned:
1064 case MethodRecognizer::kFfiLoadDouble:
1065 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1066 case MethodRecognizer::kFfiLoadPointer:
1067 case MethodRecognizer::kFfiNativeCallbackFunction:
1068 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1069 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction:
1070 case MethodRecognizer::kFfiStoreInt8:
1071 case MethodRecognizer::kFfiStoreInt16:
1072 case MethodRecognizer::kFfiStoreInt32:
1073 case MethodRecognizer::kFfiStoreInt64:
1074 case MethodRecognizer::kFfiStoreUint8:
1075 case MethodRecognizer::kFfiStoreUint16:
1076 case MethodRecognizer::kFfiStoreUint32:
1077 case MethodRecognizer::kFfiStoreUint64:
1078 case MethodRecognizer::kFfiStoreFloat:
1079 case MethodRecognizer::kFfiStoreFloatUnaligned:
1080 case MethodRecognizer::kFfiStoreDouble:
1081 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1082 case MethodRecognizer::kFfiStorePointer:
1083 case MethodRecognizer::kFfiFromAddress:
1084 case MethodRecognizer::kFfiGetAddress:
1085 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1086 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1087 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1088 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1089 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1090 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1091 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1092 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1093 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1094 case MethodRecognizer::kFfiAsExternalTypedDataDouble:
1095 case MethodRecognizer::kGetNativeField:
1096 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1097 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1098 case MethodRecognizer::kFinalizerBase_setIsolate:
1099 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1100 case MethodRecognizer::kFinalizerEntry_allocate:
1101 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1102 case MethodRecognizer::kCheckNotDeeplyImmutable:
1103 case MethodRecognizer::kObjectEquals:
1104 case MethodRecognizer::kStringBaseCodeUnitAt:
1105 case MethodRecognizer::kStringBaseLength:
1106 case MethodRecognizer::kStringBaseIsEmpty:
1107 case MethodRecognizer::kClassIDgetID:
1108 case MethodRecognizer::kGrowableArrayAllocateWithData:
1109 case MethodRecognizer::kGrowableArrayCapacity:
1110 case MethodRecognizer::kObjectArrayAllocate:
1111 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1112 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1113 case MethodRecognizer::kFfiAbi:
1114 case MethodRecognizer::kUtf8DecoderScan:
1115 case MethodRecognizer::kHas63BitSmis:
1116 case MethodRecognizer::kExtensionStreamHasListener:
1117 case MethodRecognizer::kSmi_hashCode:
1118 case MethodRecognizer::kMint_hashCode:
1119 case MethodRecognizer::kDouble_hashCode:
1120#define CASE(method, slot) case MethodRecognizer::k##method:
1121 LOAD_NATIVE_FIELD(CASE)
1122 STORE_NATIVE_FIELD(CASE)
1123 STORE_NATIVE_FIELD_NO_BARRIER(CASE)
1124#undef CASE
1125 return true;
1126 case MethodRecognizer::kDoubleToInteger:
1127 case MethodRecognizer::kDoubleMod:
1128 case MethodRecognizer::kDoubleRoundToDouble:
1129 case MethodRecognizer::kDoubleTruncateToDouble:
1130 case MethodRecognizer::kDoubleFloorToDouble:
1131 case MethodRecognizer::kDoubleCeilToDouble:
1132 case MethodRecognizer::kMathDoublePow:
1133 case MethodRecognizer::kMathSin:
1134 case MethodRecognizer::kMathCos:
1135 case MethodRecognizer::kMathTan:
1136 case MethodRecognizer::kMathAsin:
1137 case MethodRecognizer::kMathAcos:
1138 case MethodRecognizer::kMathAtan:
1139 case MethodRecognizer::kMathAtan2:
1140 case MethodRecognizer::kMathExp:
1141 case MethodRecognizer::kMathLog:
1142 case MethodRecognizer::kMathSqrt:
1143 return FlowGraphCompiler::SupportsUnboxedDoubles();
1144 case MethodRecognizer::kDoubleCeilToInt:
1145 case MethodRecognizer::kDoubleFloorToInt:
1146 if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
1147#if defined(TARGET_ARCH_X64)
1148 return CompilerState::Current().is_aot() || FLAG_target_unknown_cpu;
1149#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) || \
1150 defined(TARGET_ARCH_RISCV64)
1151 return true;
1152#else
1153 return false;
1154#endif
1155 default:
1156 return false;
1157 }
1158}
1159
1160bool FlowGraphBuilder::IsExpressionTempVarUsedInRecognizedMethodFlowGraph(
1161 const Function& function) {
1162 ASSERT(IsRecognizedMethodForFlowGraph(function));
1163 switch (function.recognized_kind()) {
1164 case MethodRecognizer::kStringBaseCodeUnitAt:
1165 return true;
1166 default:
1167 return false;
1168 }
1169}
1170
1171FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
1172 const Function& function) {
1173 ASSERT(IsRecognizedMethodForFlowGraph(function));
1174
1175 graph_entry_ =
1176 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
1177
1178 auto normal_entry = BuildFunctionEntry(graph_entry_);
1179 graph_entry_->set_normal_entry(normal_entry);
1180
1181 PrologueInfo prologue_info(-1, -1);
1182 BlockEntryInstr* instruction_cursor =
1183 BuildPrologue(normal_entry, &prologue_info);
1184
1185 Fragment body(instruction_cursor);
1186 body += CheckStackOverflowInPrologue(function.token_pos());
1187
1188 if (function.IsDynamicInvocationForwarder()) {
1189 body += BuildDefaultTypeHandling(function);
1190 BuildTypeArgumentTypeChecks(
1191 TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds, &body);
1192 BuildArgumentTypeChecks(&body, &body, nullptr);
1193 }
1194
1195 const MethodRecognizer::Kind kind = function.recognized_kind();
1196 switch (kind) {
1197#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
1198 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
1199 FALL_THROUGH; \
1200 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
1201 FALL_THROUGH; \
1202 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
1203 FALL_THROUGH;
1204 CLASS_LIST_TYPED_DATA(TYPED_DATA_GET_INDEXED_CASES)
1205#undef TYPED_DATA_GET_INDEXED_CASES
1206 case MethodRecognizer::kObjectArrayGetIndexed:
1207 case MethodRecognizer::kGrowableArrayGetIndexed: {
1208 ASSERT_EQUAL(function.NumParameters(), 2);
1209 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1210 const Representation elem_rep =
1211 RepresentationUtils::RepresentationOfArrayElement(array_cid);
1212 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1213 body += LoadNativeField(Slot::GetLengthFieldForArrayCid(array_cid));
1214 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1215 body += GenericCheckBound();
1216 LocalVariable* safe_index = MakeTemporary();
1217 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1218 if (IsTypedDataBaseClassId(array_cid) && !CanUnboxElements(array_cid)) {
1219 const auto& native_function =
1220 TypedListGetNativeFunction(thread_, array_cid);
1221 body += LoadLocal(safe_index);
1222 body += UnboxTruncate(kUnboxedIntPtr);
1223 body += IntConstant(Utils::ShiftForPowerOfTwo(
1224 RepresentationUtils::ValueSize(elem_rep)));
1225 body += BinaryIntegerOp(Token::kSHL, kUnboxedIntPtr,
1226 /*is_truncating=*/true);
1227 body += StaticCall(TokenPosition::kNoSource, native_function, 2,
1228 ICData::kNoRebind);
1229 } else {
1230 if (kind == MethodRecognizer::kGrowableArrayGetIndexed) {
1231 body += LoadNativeField(Slot::GrowableObjectArray_data());
1232 array_cid = kArrayCid;
1233 } else if (IsExternalTypedDataClassId(array_cid)) {
1234 body += LoadNativeField(Slot::PointerBase_data(),
1235 InnerPointerAccess::kCannotBeInnerPointer);
1236 }
1237 body += LoadLocal(safe_index);
1238 body +=
1239 LoadIndexed(array_cid,
1240 /*index_scale=*/
1241 compiler::target::Instance::ElementSizeFor(array_cid),
1242 /*index_unboxed=*/
1243 GenericCheckBoundInstr::UseUnboxedRepresentation());
1244 if (elem_rep == kUnboxedFloat) {
1245 body += FloatToDouble();
1246 }
1247 }
1248 body += DropTempsPreserveTop(1); // Drop [safe_index], keep result.
1249 break;
1250 }
1251 case MethodRecognizer::kRecord_fieldAt:
1252 ASSERT_EQUAL(function.NumParameters(), 2);
1253 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1254 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1255 body += LoadIndexed(
1256 kRecordCid, /*index_scale*/ compiler::target::kCompressedWordSize);
1257 break;
1258 case MethodRecognizer::kRecord_fieldNames:
1259 body += LoadObjectStore();
1260 body += LoadNativeField(Slot::ObjectStore_record_field_names());
1261 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1262 body += LoadNativeField(Slot::Record_shape());
1263 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexShift);
1264 body += SmiBinaryOp(Token::kSHR);
1265 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexMask);
1266 body += SmiBinaryOp(Token::kBIT_AND);
1267 body += LoadIndexed(
1268 kArrayCid, /*index_scale=*/compiler::target::kCompressedWordSize);
1269 break;
1270 case MethodRecognizer::kRecord_numFields:
1271 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1272 body += LoadNativeField(Slot::Record_shape());
1273 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
1274 body += SmiBinaryOp(Token::kBIT_AND);
1275 break;
1276 case MethodRecognizer::kSuspendState_clone: {
1277 ASSERT_EQUAL(function.NumParameters(), 1);
1278 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1279 body += Call1ArgStub(TokenPosition::kNoSource,
1280 Call1ArgStubInstr::StubId::kCloneSuspendState);
1281 break;
1282 }
1283 case MethodRecognizer::kSuspendState_resume: {
1284 const Code& resume_stub =
1285 Code::ZoneHandle(Z, IG->object_store()->resume_stub());
1286 body += NullConstant();
1287 body += TailCall(resume_stub);
1288 break;
1289 }
1290 case MethodRecognizer::kTypedList_GetInt8:
1291 body += BuildTypedListGet(function, kTypedDataInt8ArrayCid);
1292 break;
1293 case MethodRecognizer::kTypedList_SetInt8:
1294 body += BuildTypedListSet(function, kTypedDataInt8ArrayCid);
1295 break;
1296 case MethodRecognizer::kTypedList_GetUint8:
1297 body += BuildTypedListGet(function, kTypedDataUint8ArrayCid);
1298 break;
1299 case MethodRecognizer::kTypedList_SetUint8:
1300 body += BuildTypedListSet(function, kTypedDataUint8ArrayCid);
1301 break;
1302 case MethodRecognizer::kTypedList_GetInt16:
1303 body += BuildTypedListGet(function, kTypedDataInt16ArrayCid);
1304 break;
1305 case MethodRecognizer::kTypedList_SetInt16:
1306 body += BuildTypedListSet(function, kTypedDataInt16ArrayCid);
1307 break;
1308 case MethodRecognizer::kTypedList_GetUint16:
1309 body += BuildTypedListGet(function, kTypedDataUint16ArrayCid);
1310 break;
1311 case MethodRecognizer::kTypedList_SetUint16:
1312 body += BuildTypedListSet(function, kTypedDataUint16ArrayCid);
1313 break;
1314 case MethodRecognizer::kTypedList_GetInt32:
1315 body += BuildTypedListGet(function, kTypedDataInt32ArrayCid);
1316 break;
1317 case MethodRecognizer::kTypedList_SetInt32:
1318 body += BuildTypedListSet(function, kTypedDataInt32ArrayCid);
1319 break;
1320 case MethodRecognizer::kTypedList_GetUint32:
1321 body += BuildTypedListGet(function, kTypedDataUint32ArrayCid);
1322 break;
1323 case MethodRecognizer::kTypedList_SetUint32:
1324 body += BuildTypedListSet(function, kTypedDataUint32ArrayCid);
1325 break;
1326 case MethodRecognizer::kTypedList_GetInt64:
1327 body += BuildTypedListGet(function, kTypedDataInt64ArrayCid);
1328 break;
1329 case MethodRecognizer::kTypedList_SetInt64:
1330 body += BuildTypedListSet(function, kTypedDataInt64ArrayCid);
1331 break;
1332 case MethodRecognizer::kTypedList_GetUint64:
1333 body += BuildTypedListGet(function, kTypedDataUint64ArrayCid);
1334 break;
1335 case MethodRecognizer::kTypedList_SetUint64:
1336 body += BuildTypedListSet(function, kTypedDataUint64ArrayCid);
1337 break;
1338 case MethodRecognizer::kTypedList_GetFloat32:
1339 body += BuildTypedListGet(function, kTypedDataFloat32ArrayCid);
1340 break;
1341 case MethodRecognizer::kTypedList_SetFloat32:
1342 body += BuildTypedListSet(function, kTypedDataFloat32ArrayCid);
1343 break;
1344 case MethodRecognizer::kTypedList_GetFloat64:
1345 body += BuildTypedListGet(function, kTypedDataFloat64ArrayCid);
1346 break;
1347 case MethodRecognizer::kTypedList_SetFloat64:
1348 body += BuildTypedListSet(function, kTypedDataFloat64ArrayCid);
1349 break;
1350 case MethodRecognizer::kTypedList_GetInt32x4:
1351 body += BuildTypedListGet(function, kTypedDataInt32x4ArrayCid);
1352 break;
1353 case MethodRecognizer::kTypedList_SetInt32x4:
1354 body += BuildTypedListSet(function, kTypedDataInt32x4ArrayCid);
1355 break;
1356 case MethodRecognizer::kTypedList_GetFloat32x4:
1357 body += BuildTypedListGet(function, kTypedDataFloat32x4ArrayCid);
1358 break;
1359 case MethodRecognizer::kTypedList_SetFloat32x4:
1360 body += BuildTypedListSet(function, kTypedDataFloat32x4ArrayCid);
1361 break;
1362 case MethodRecognizer::kTypedList_GetFloat64x2:
1363 body += BuildTypedListGet(function, kTypedDataFloat64x2ArrayCid);
1364 break;
1365 case MethodRecognizer::kTypedList_SetFloat64x2:
1366 body += BuildTypedListSet(function, kTypedDataFloat64x2ArrayCid);
1367 break;
1368 case MethodRecognizer::kTypedData_memMove1:
1369 body += BuildTypedDataMemMove(function, kTypedDataInt8ArrayCid);
1370 break;
1371 case MethodRecognizer::kTypedData_memMove2:
1372 body += BuildTypedDataMemMove(function, kTypedDataInt16ArrayCid);
1373 break;
1374 case MethodRecognizer::kTypedData_memMove4:
1375 body += BuildTypedDataMemMove(function, kTypedDataInt32ArrayCid);
1376 break;
1377 case MethodRecognizer::kTypedData_memMove8:
1378 body += BuildTypedDataMemMove(function, kTypedDataInt64ArrayCid);
1379 break;
1380 case MethodRecognizer::kTypedData_memMove16:
1381 body += BuildTypedDataMemMove(function, kTypedDataInt32x4ArrayCid);
1382 break;
1383#define CASE(name) \
1384 case MethodRecognizer::kTypedData_##name##_factory: \
1385 body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
1386 break; \
1387 case MethodRecognizer::kTypedData_##name##View_factory: \
1388 body += BuildTypedDataViewFactoryConstructor(function, \
1389 kTypedData##name##ViewCid); \
1390 break; \
1391 case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
1392 body += BuildTypedDataViewFactoryConstructor( \
1393 function, kUnmodifiableTypedData##name##ViewCid); \
1394 break;
1395 CLASS_LIST_TYPED_DATA(CASE)
1396#undef CASE
1397 case MethodRecognizer::kTypedData_ByteDataView_factory:
1398 body += BuildTypedDataViewFactoryConstructor(function, kByteDataViewCid);
1399 break;
1400 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1401 body += BuildTypedDataViewFactoryConstructor(
1402 function, kUnmodifiableByteDataViewCid);
1403 break;
1404 case MethodRecognizer::kObjectEquals:
1405 ASSERT_EQUAL(function.NumParameters(), 2);
1406 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1407 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1408 body += StrictCompare(Token::kEQ_STRICT);
1409 break;
1410 case MethodRecognizer::kStringBaseCodeUnitAt: {
1411 ASSERT_EQUAL(function.NumParameters(), 2);
1412 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1413 body += LoadNativeField(Slot::String_length());
1414 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1415 body += GenericCheckBound();
1416 LocalVariable* safe_index = MakeTemporary();
1417
1418 JoinEntryInstr* done = BuildJoinEntry();
1419 LocalVariable* result = parsed_function_->expression_temp_var();
1420 TargetEntryInstr* one_byte_string;
1421 TargetEntryInstr* two_byte_string;
1422 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1423 body += LoadClassId();
1424 body += IntConstant(kOneByteStringCid);
1425 body += BranchIfEqual(&one_byte_string, &two_byte_string);
1426
1427 body.current = one_byte_string;
1428 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1429 body += LoadLocal(safe_index);
1430 body += LoadIndexed(
1431 kOneByteStringCid,
1432 /*index_scale=*/
1433 compiler::target::Instance::ElementSizeFor(kOneByteStringCid),
1434 /*index_unboxed=*/GenericCheckBoundInstr::UseUnboxedRepresentation());
1435 body += StoreLocal(TokenPosition::kNoSource, result);
1436 body += Drop();
1437 body += Goto(done);
1438
1439 body.current = two_byte_string;
1440 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1441 body += LoadLocal(safe_index);
1442 body += LoadIndexed(
1443 kTwoByteStringCid,
1444 /*index_scale=*/
1445 compiler::target::Instance::ElementSizeFor(kTwoByteStringCid),
1446 /*index_unboxed=*/GenericCheckBoundInstr::UseUnboxedRepresentation());
1447 body += StoreLocal(TokenPosition::kNoSource, result);
1448 body += Drop();
1449 body += Goto(done);
1450
1451 body.current = done;
1452 body += DropTemporary(&safe_index);
1453 body += LoadLocal(result);
1454 } break;
1455 case MethodRecognizer::kStringBaseLength:
1456 case MethodRecognizer::kStringBaseIsEmpty:
1457 ASSERT_EQUAL(function.NumParameters(), 1);
1458 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1459 body += LoadNativeField(Slot::String_length());
1460 if (kind == MethodRecognizer::kStringBaseIsEmpty) {
1461 body += IntConstant(0);
1462 body += StrictCompare(Token::kEQ_STRICT);
1463 }
1464 break;
1465 case MethodRecognizer::kClassIDgetID:
1466 ASSERT_EQUAL(function.NumParameters(), 1);
1467 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1468 body += LoadClassId();
1469 break;
1470 case MethodRecognizer::kGrowableArrayAllocateWithData: {
1471 ASSERT(function.IsFactory());
1472 ASSERT_EQUAL(function.NumParameters(), 2);
1473 const Class& cls =
1474 Class::ZoneHandle(Z, compiler::GrowableObjectArrayClass().ptr());
1475 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1476 body += AllocateObject(TokenPosition::kNoSource, cls, 1);
1477 LocalVariable* object = MakeTemporary();
1478 body += LoadLocal(object);
1479 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1480 body += StoreNativeField(Slot::GrowableObjectArray_data(),
1481 StoreFieldInstr::Kind::kInitializing,
1482 kNoStoreBarrier);
1483 body += LoadLocal(object);
1484 body += IntConstant(0);
1485 body += StoreNativeField(Slot::GrowableObjectArray_length(),
1486 StoreFieldInstr::Kind::kInitializing,
1487 kNoStoreBarrier);
1488 break;
1489 }
1490 case MethodRecognizer::kGrowableArrayCapacity:
1491 ASSERT_EQUAL(function.NumParameters(), 1);
1492 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1493 body += LoadNativeField(Slot::GrowableObjectArray_data());
1494 body += LoadNativeField(Slot::Array_length());
1495 break;
1496 case MethodRecognizer::kObjectArrayAllocate:
1497 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
1498 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1499 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1500 body += CreateArray();
1501 break;
1502 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1503 ASSERT_EQUAL(function.NumParameters(), 5);
1504 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1505 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1506 body += LoadLocal(parsed_function_->RawParameterVariable(2));
1507 body += LoadLocal(parsed_function_->RawParameterVariable(3));
1508 body += LoadLocal(parsed_function_->RawParameterVariable(4));
1509 body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
1510 /*unboxed_inputs=*/false,
1511 /*can_overlap=*/false);
1512 body += NullConstant();
1513 break;
1514 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1515 ASSERT_EQUAL(function.NumParameters(), 2);
1516 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1517 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1518 // Uses a store-release barrier so that other isolates will see the
1519 // contents of the index after seeing the index itself.
1520 body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
1521 StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
1522 compiler::Assembler::kRelease);
1523 body += NullConstant();
1524 break;
1525 case MethodRecognizer::kUtf8DecoderScan:
1526 ASSERT_EQUAL(function.NumParameters(), 5);
1527 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // decoder
1528 body += LoadLocal(parsed_function_->RawParameterVariable(1)); // bytes
1529 body += LoadLocal(parsed_function_->RawParameterVariable(2)); // start
1530 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1531 body += UnboxTruncate(kUnboxedIntPtr);
1532 body += LoadLocal(parsed_function_->RawParameterVariable(3)); // end
1533 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1534 body += UnboxTruncate(kUnboxedIntPtr);
1535 body += LoadLocal(parsed_function_->RawParameterVariable(4)); // table
1536 body += Utf8Scan();
1537 body += Box(kUnboxedIntPtr);
1538 break;
1539 case MethodRecognizer::kMemCopy: {
1540 ASSERT_EQUAL(function.NumParameters(), 5);
1541 LocalVariable* arg_target = parsed_function_->RawParameterVariable(0);
1542 LocalVariable* arg_target_offset_in_bytes =
1543 parsed_function_->RawParameterVariable(1);
1544 LocalVariable* arg_source = parsed_function_->RawParameterVariable(2);
1545 LocalVariable* arg_source_offset_in_bytes =
1546 parsed_function_->RawParameterVariable(3);
1547 LocalVariable* arg_length_in_bytes =
1548 parsed_function_->RawParameterVariable(4);
1549 body += LoadLocal(arg_source);
1550 body += LoadLocal(arg_target);
1551 body += LoadLocal(arg_source_offset_in_bytes);
1552 body += UnboxTruncate(kUnboxedIntPtr);
1553 body += LoadLocal(arg_target_offset_in_bytes);
1554 body += UnboxTruncate(kUnboxedIntPtr);
1555 body += LoadLocal(arg_length_in_bytes);
1556 body += UnboxTruncate(kUnboxedIntPtr);
1557 body += MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1558 /*unboxed_inputs=*/true,
1559 /*can_overlap=*/true);
1560 body += NullConstant();
1561 } break;
1562 case MethodRecognizer::kFfiAbi:
1563 ASSERT_EQUAL(function.NumParameters(), 0);
1564 body += IntConstant(static_cast<int64_t>(compiler::ffi::TargetAbi()));
1565 break;
1566 case MethodRecognizer::kFfiNativeCallbackFunction:
1567 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1568 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction: {
1569 const auto& error = String::ZoneHandle(
1570 Z, Symbols::New(thread_,
1571 "This function should be handled on call site."));
1572 body += Constant(error);
1573 body += ThrowException(TokenPosition::kNoSource);
1574 break;
1575 }
1576 case MethodRecognizer::kFfiLoadInt8:
1577 case MethodRecognizer::kFfiLoadInt16:
1578 case MethodRecognizer::kFfiLoadInt32:
1579 case MethodRecognizer::kFfiLoadInt64:
1580 case MethodRecognizer::kFfiLoadUint8:
1581 case MethodRecognizer::kFfiLoadUint16:
1582 case MethodRecognizer::kFfiLoadUint32:
1583 case MethodRecognizer::kFfiLoadUint64:
1584 case MethodRecognizer::kFfiLoadFloat:
1585 case MethodRecognizer::kFfiLoadFloatUnaligned:
1586 case MethodRecognizer::kFfiLoadDouble:
1587 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1588 case MethodRecognizer::kFfiLoadPointer: {
1589 const classid_t ffi_type_arg_cid =
1590 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1591 const AlignmentType alignment =
1592 compiler::ffi::RecognizedMethodAlignment(kind);
1593 const classid_t typed_data_cid =
1594 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1595
1596 ASSERT_EQUAL(function.NumParameters(), 2);
1597 // Argument can be a TypedData for loads on struct fields.
1598 LocalVariable* arg_typed_data_base =
1599 parsed_function_->RawParameterVariable(0);
1600 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1601
1602 body += LoadLocal(arg_typed_data_base);
1603 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1604 body += LoadLocal(arg_offset);
1605 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1606 body += UnboxTruncate(kUnboxedIntPtr);
1607 body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
1608 /*index_unboxed=*/true, alignment);
1609 if (kind == MethodRecognizer::kFfiLoadPointer) {
1610 const auto& pointer_class =
1611 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1612 const auto& type_arguments = TypeArguments::ZoneHandle(
1613 Z, IG->object_store()->type_argument_never());
1614
1615 // We do not reify Pointer type arguments
1616 ASSERT(function.NumTypeParameters() == 1);
1617 LocalVariable* address = MakeTemporary();
1618 body += Constant(type_arguments);
1619 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1620 LocalVariable* pointer = MakeTemporary();
1621 body += LoadLocal(pointer);
1622 body += LoadLocal(address);
1623 ASSERT_EQUAL(LoadIndexedInstr::ReturnRepresentation(typed_data_cid),
1624 kUnboxedAddress);
1625 body += ConvertUnboxedToUntagged();
1626 body += StoreNativeField(Slot::PointerBase_data(),
1627 InnerPointerAccess::kCannotBeInnerPointer,
1628 StoreFieldInstr::Kind::kInitializing);
1629 body += DropTempsPreserveTop(1); // Drop [address] keep [pointer].
1630 } else {
1631 // Avoid any unnecessary (and potentially deoptimizing) int
1632 // conversions by using the representation returned from LoadIndexed.
1633 body += Box(LoadIndexedInstr::ReturnRepresentation(typed_data_cid));
1634 }
1635 } break;
1636 case MethodRecognizer::kFfiStoreInt8:
1637 case MethodRecognizer::kFfiStoreInt16:
1638 case MethodRecognizer::kFfiStoreInt32:
1639 case MethodRecognizer::kFfiStoreInt64:
1640 case MethodRecognizer::kFfiStoreUint8:
1641 case MethodRecognizer::kFfiStoreUint16:
1642 case MethodRecognizer::kFfiStoreUint32:
1643 case MethodRecognizer::kFfiStoreUint64:
1644 case MethodRecognizer::kFfiStoreFloat:
1645 case MethodRecognizer::kFfiStoreFloatUnaligned:
1646 case MethodRecognizer::kFfiStoreDouble:
1647 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1648 case MethodRecognizer::kFfiStorePointer: {
1649 const classid_t ffi_type_arg_cid =
1650 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1651 const AlignmentType alignment =
1652 compiler::ffi::RecognizedMethodAlignment(kind);
1653 const classid_t typed_data_cid =
1654 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1655
1656 // Argument can be a TypedData for stores on struct fields.
1657 LocalVariable* arg_typed_data_base =
1658 parsed_function_->RawParameterVariable(0);
1659 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1660 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
1661
1662 ASSERT_EQUAL(function.NumParameters(), 3);
1663
1664 body += LoadLocal(arg_typed_data_base); // Pointer.
1665 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1666 body += LoadLocal(arg_offset);
1667 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1668 body += UnboxTruncate(kUnboxedIntPtr);
1669 body += LoadLocal(arg_value);
1670 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1671 if (kind == MethodRecognizer::kFfiStorePointer) {
1672 // This can only be Pointer, so it is safe to load the data field.
1673 body += LoadNativeField(Slot::PointerBase_data(),
1674 InnerPointerAccess::kCannotBeInnerPointer);
1675 body += ConvertUntaggedToUnboxed();
1676 ASSERT_EQUAL(StoreIndexedInstr::ValueRepresentation(typed_data_cid),
1677 kUnboxedAddress);
1678 } else {
1679 // Avoid any unnecessary (and potentially deoptimizing) int
1680 // conversions by using the representation consumed by StoreIndexed.
1681 body += UnboxTruncate(
1682 StoreIndexedInstr::ValueRepresentation(typed_data_cid));
1683 }
1684 body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
1685 /*index_unboxed=*/true, alignment);
1686 body += NullConstant();
1687 } break;
1688 case MethodRecognizer::kFfiFromAddress: {
1689 const auto& pointer_class =
1690 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1691 const auto& type_arguments = TypeArguments::ZoneHandle(
1692 Z, IG->object_store()->type_argument_never());
1693
1694 ASSERT(function.NumTypeParameters() == 1);
1695 ASSERT_EQUAL(function.NumParameters(), 1);
1696 body += Constant(type_arguments);
1697 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1698 body += LoadLocal(MakeTemporary()); // Duplicate Pointer.
1699 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Address.
1700 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1701 // Use the same representation as FfiGetAddress so that the conversions
1702 // in Pointer.fromAddress(address).address cancel out if the temporary
1703 // Pointer allocation is removed.
1704 body += UnboxTruncate(kUnboxedAddress);
1705 body += ConvertUnboxedToUntagged();
1706 body += StoreNativeField(Slot::PointerBase_data(),
1707 InnerPointerAccess::kCannotBeInnerPointer,
1708 StoreFieldInstr::Kind::kInitializing);
1709 } break;
1710 case MethodRecognizer::kFfiGetAddress: {
1711 ASSERT_EQUAL(function.NumParameters(), 1);
1712 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Pointer.
1713 body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
1714 // This can only be Pointer, so it is safe to load the data field.
1715 body += LoadNativeField(Slot::PointerBase_data(),
1716 InnerPointerAccess::kCannotBeInnerPointer);
1717 body += ConvertUntaggedToUnboxed();
1718 body += Box(kUnboxedAddress);
1719 } break;
1720 case MethodRecognizer::kHas63BitSmis: {
1721#if defined(HAS_SMI_63_BITS)
1722 body += Constant(Bool::True());
1723#else
1724 body += Constant(Bool::False());
1725#endif  // defined(HAS_SMI_63_BITS)
1726 } break;
1727 case MethodRecognizer::kExtensionStreamHasListener: {
1728#ifdef PRODUCT
1729 body += Constant(Bool::False());
1730#else
1731 body += LoadServiceExtensionStream();
1732 body += LoadNativeField(Slot::StreamInfo_enabled());
1733 // StreamInfo::enabled_ is a std::atomic<intptr_t>. This is effectively
1734 // relaxed order access, which is acceptable for this use case.
1735 body += IntToBool();
1736#endif // PRODUCT
1737 } break;
1738 case MethodRecognizer::kSmi_hashCode: {
1739 // TODO(dartbug.com/38985): We should make this LoadLocal+Unbox+
1740 // IntegerHash+Box. Though this would make use of unboxed values on stack
1741 // which isn't allowed in unoptimized mode.
1742 // Once force-optimized functions can be inlined, we should change this
1743 // code to the above.
1744 ASSERT_EQUAL(function.NumParameters(), 1);
1745 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1746 body += BuildIntegerHashCode(/*smi=*/true);
1747 } break;
1748 case MethodRecognizer::kMint_hashCode: {
1749 ASSERT_EQUAL(function.NumParameters(), 1);
1750 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1751 body += BuildIntegerHashCode(/*smi=*/false);
1752 } break;
1753 case MethodRecognizer::kDouble_hashCode: {
1754 ASSERT_EQUAL(function.NumParameters(), 1);
1755 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1756 body += UnboxTruncate(kUnboxedDouble);
1757 body += BuildDoubleHashCode();
1758 body += Box(kUnboxedInt64);
1759 } break;
1760 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1761 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1762 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1763 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1764 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1765 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1766 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1767 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1768 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1769 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1770 const classid_t ffi_type_arg_cid =
1771 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1772 const classid_t external_typed_data_cid =
1773 compiler::ffi::ElementExternalTypedDataCid(ffi_type_arg_cid);
1774
1775 auto class_table = thread_->isolate_group()->class_table();
1776 ASSERT(class_table->HasValidClassAt(external_typed_data_cid));
1777 const auto& typed_data_class =
1778 Class::ZoneHandle(H.zone(), class_table->At(external_typed_data_cid));
1779
1780 // We assume that the caller has checked that the arguments are non-null
1781 // and length is in the range [0, kSmiMax/elementSize].
1782 ASSERT_EQUAL(function.NumParameters(), 2);
1783 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
1784 LocalVariable* arg_length = parsed_function_->RawParameterVariable(1);
1785
1786 body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1787 LocalVariable* typed_data_object = MakeTemporary();
1788
1789 // Initialize the result's length field.
1790 body += LoadLocal(typed_data_object);
1791 body += LoadLocal(arg_length);
1792 body += StoreNativeField(Slot::TypedDataBase_length(),
1793 StoreFieldInstr::Kind::kInitializing,
1794 kNoStoreBarrier);
1795
1796 // Initialize the result's data pointer field.
1797 body += LoadLocal(typed_data_object);
1798 body += LoadLocal(arg_pointer);
1799 body += LoadNativeField(Slot::PointerBase_data(),
1800 InnerPointerAccess::kCannotBeInnerPointer);
1801 body += StoreNativeField(Slot::PointerBase_data(),
1802 InnerPointerAccess::kCannotBeInnerPointer,
1803 StoreFieldInstr::Kind::kInitializing);
1804 } break;
1805 case MethodRecognizer::kGetNativeField: {
1806 auto& name = String::ZoneHandle(Z, function.name());
1807 // Note: This method is force optimized so we can push untagged, etc.
1808 // Load TypedDataArray from Instance Handle implementing
1809 // NativeFieldWrapper.
1810 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Object.
1811 body += CheckNullOptimized(name);
1812 body += LoadNativeField(Slot::Instance_native_fields_array()); // Fields.
1813 body += CheckNullOptimized(name);
1814 // Load the native field at index.
1815 body += IntConstant(0); // Index.
1816 body += LoadIndexed(kIntPtrCid);
1817 body += Box(kUnboxedIntPtr);
1818 } break;
1819 case MethodRecognizer::kDoubleToInteger:
1820 case MethodRecognizer::kDoubleCeilToInt:
1821 case MethodRecognizer::kDoubleFloorToInt: {
1822 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1823 body += DoubleToInteger(kind);
1824 } break;
1825 case MethodRecognizer::kDoubleMod:
1826 case MethodRecognizer::kDoubleRoundToDouble:
1827 case MethodRecognizer::kDoubleTruncateToDouble:
1828 case MethodRecognizer::kDoubleFloorToDouble:
1829 case MethodRecognizer::kDoubleCeilToDouble:
1830 case MethodRecognizer::kMathDoublePow:
1831 case MethodRecognizer::kMathSin:
1832 case MethodRecognizer::kMathCos:
1833 case MethodRecognizer::kMathTan:
1834 case MethodRecognizer::kMathAsin:
1835 case MethodRecognizer::kMathAcos:
1836 case MethodRecognizer::kMathAtan:
1837 case MethodRecognizer::kMathAtan2:
1838 case MethodRecognizer::kMathExp:
1839 case MethodRecognizer::kMathLog: {
1840 for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
1841 body += LoadLocal(parsed_function_->RawParameterVariable(i));
1842 }
1843 if (!CompilerState::Current().is_aot() &&
1844 TargetCPUFeatures::double_truncate_round_supported() &&
1845 ((kind == MethodRecognizer::kDoubleTruncateToDouble) ||
1846 (kind == MethodRecognizer::kDoubleFloorToDouble) ||
1847 (kind == MethodRecognizer::kDoubleCeilToDouble))) {
1848 switch (kind) {
1849 case MethodRecognizer::kDoubleTruncateToDouble:
1850 body += UnaryDoubleOp(Token::kTRUNCATE);
1851 break;
1852 case MethodRecognizer::kDoubleFloorToDouble:
1853 body += UnaryDoubleOp(Token::kFLOOR);
1854 break;
1855 case MethodRecognizer::kDoubleCeilToDouble:
1856 body += UnaryDoubleOp(Token::kCEILING);
1857 break;
1858 default:
1859 UNREACHABLE();
1860 }
1861 } else {
1862 body += InvokeMathCFunction(kind, function.NumParameters());
1863 }
1864 } break;
1865 case MethodRecognizer::kMathSqrt: {
1866 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1867 body += UnaryDoubleOp(Token::kSQRT);
1868 } break;
1869 case MethodRecognizer::kFinalizerBase_setIsolate:
1870 ASSERT_EQUAL(function.NumParameters(), 1);
1871 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1872 body += LoadIsolate();
1873 body += StoreNativeField(Slot::FinalizerBase_isolate(),
1874 InnerPointerAccess::kCannotBeInnerPointer);
1875 body += NullConstant();
1876 break;
1877 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1878 ASSERT_EQUAL(function.NumParameters(), 0);
1879 body += LoadIsolate();
1880 body += LoadNativeField(Slot::Isolate_finalizers());
1881 break;
1882 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1883 ASSERT_EQUAL(function.NumParameters(), 1);
1884 body += LoadIsolate();
1885 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1886 body += StoreNativeField(Slot::Isolate_finalizers());
1887 body += NullConstant();
1888 break;
1889 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1890 ASSERT_EQUAL(function.NumParameters(), 1);
1891 ASSERT(this->optimizing_);
1892 // This relies on being force-optimized to do an 'atomic' exchange w.r.t.
1893 // the GC.
1894 // As an alternative design we could introduce an ExchangeNativeFieldInstr
1895 // that uses the same machine code as std::atomic::exchange. Or we could
1896 use a Native to do that in C.
1897 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1898 // No GC from here til StoreNativeField.
1899 body += LoadNativeField(Slot::FinalizerBase_entries_collected());
1900 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1901 body += NullConstant();
1902 body += StoreNativeField(Slot::FinalizerBase_entries_collected());
1903 break;
1904 case MethodRecognizer::kFinalizerEntry_allocate: {
1905 // Object value, Object token, Object detach, FinalizerBase finalizer
1906 ASSERT_EQUAL(function.NumParameters(), 4);
1907
1908 const auto class_table = thread_->isolate_group()->class_table();
1909 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1910 const auto& finalizer_entry_class =
1911 Class::ZoneHandle(H.zone(), class_table->At(kFinalizerEntryCid));
1912
1913 body +=
1914 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1915 LocalVariable* const entry = MakeTemporary("entry");
1916 // No GC from here to the end.
1917 body += LoadLocal(entry);
1918 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1919 body += StoreNativeField(Slot::FinalizerEntry_value());
1920 body += LoadLocal(entry);
1921 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1922 body += StoreNativeField(Slot::FinalizerEntry_token());
1923 body += LoadLocal(entry);
1924 body += LoadLocal(parsed_function_->RawParameterVariable(2));
1925 body += StoreNativeField(Slot::FinalizerEntry_detach());
1926 body += LoadLocal(entry);
1927 body += LoadLocal(parsed_function_->RawParameterVariable(3));
1928 body += StoreNativeField(Slot::FinalizerEntry_finalizer());
1929 body += LoadLocal(entry);
1930 body += UnboxedIntConstant(0, kUnboxedIntPtr);
1931 body += StoreNativeField(Slot::FinalizerEntry_external_size());
1932 break;
1933 }
1934 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1935 ASSERT_EQUAL(function.NumParameters(), 1);
1936 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1937 body += LoadNativeField(Slot::FinalizerEntry_external_size());
1938 body += Box(kUnboxedInt64);
1939 break;
1940 case MethodRecognizer::kCheckNotDeeplyImmutable:
1941 ASSERT_EQUAL(function.NumParameters(), 1);
1942 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1943 body += CheckNotDeeplyImmutable(
1944 CheckWritableInstr::kDeeplyImmutableAttachNativeFinalizer);
1945 body += NullConstant();
1946 break;
1947#define IL_BODY(method, slot) \
1948 case MethodRecognizer::k##method: \
1949 ASSERT_EQUAL(function.NumParameters(), 1); \
1950 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1951 body += LoadNativeField(Slot::slot()); \
1952 break;
1953 LOAD_NATIVE_FIELD(IL_BODY)
1954#undef IL_BODY
1955#define IL_BODY(method, slot) \
1956 case MethodRecognizer::k##method: \
1957 ASSERT_EQUAL(function.NumParameters(), 2); \
1958 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1959 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1960 body += StoreNativeField(Slot::slot()); \
1961 body += NullConstant(); \
1962 break;
1963 STORE_NATIVE_FIELD(IL_BODY)
1964#undef IL_BODY
1965#define IL_BODY(method, slot) \
1966 case MethodRecognizer::k##method: \
1967 ASSERT_EQUAL(function.NumParameters(), 2); \
1968 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1969 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1970 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1971 kNoStoreBarrier); \
1972 body += NullConstant(); \
1973 break;
1974 STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
1975#undef IL_BODY
1976 default: {
1977 UNREACHABLE();
1978 break;
1979 }
1980 }
1981
1982 if (body.is_open()) {
1983 body +=
1984 Return(TokenPosition::kNoSource, /* omit_result_type_check = */ true);
1985 }
1986
1987 return new (Z)
1988 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
1989 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
1990}
1991
1992Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
1993 const Function& function,
1994 classid_t cid) {
1995 auto token_pos = function.token_pos();
1996 auto class_table = Thread::Current()->isolate_group()->class_table();
1997
1998 ASSERT(class_table->HasValidClassAt(cid));
1999 const auto& view_class = Class::ZoneHandle(H.zone(), class_table->At(cid));
2000
2001 ASSERT(function.IsFactory() && (function.NumParameters() == 4));
2002 LocalVariable* typed_data = parsed_function_->RawParameterVariable(1);
2003 LocalVariable* offset_in_bytes = parsed_function_->RawParameterVariable(2);
2004 LocalVariable* length = parsed_function_->RawParameterVariable(3);
2005
2006 Fragment body;
2007
2008 // Note that we do no input checking here before allocation. The factory is
2009 // private, and only called by other code in the library implementation.
2010 // Thus, either the inputs are checked within Dart code before the factory is
2011 // called (e.g., the implementation of XList.sublistView), or the inputs to
2012 // the factory are retrieved from previously constructed TypedData objects
2013 // and thus already checked (e.g., the implementation of the
2014 // UnmodifiableXListView constructors).
2015
2016 body += AllocateObject(token_pos, view_class, /*arg_count=*/0);
2017 LocalVariable* view_object = MakeTemporary();
2018
2019 body += LoadLocal(view_object);
2020 body += LoadLocal(typed_data);
2021 body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
2022 StoreFieldInstr::Kind::kInitializing);
2023
2024 body += LoadLocal(view_object);
2025 body += LoadLocal(offset_in_bytes);
2026 body +=
2027 StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
2028 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2029
2030 body += LoadLocal(view_object);
2031 body += LoadLocal(length);
2032 body +=
2033 StoreNativeField(token_pos, Slot::TypedDataBase_length(),
2034 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2035
2036 // First unbox the offset in bytes prior to the unsafe untagged load to avoid
2037 // any boxes being inserted between the load and its use. While any such box
2038 // is eventually canonicalized away, the FlowGraphChecker runs after every
2039 // pass in DEBUG mode and may see the box before canonicalization happens.
2040 body += LoadLocal(offset_in_bytes);
2041 body += UnboxTruncate(kUnboxedIntPtr);
2042 LocalVariable* unboxed_offset_in_bytes =
2043 MakeTemporary("unboxed_offset_in_bytes");
2044 // Now update the inner pointer.
2045 //
2046 // WARNING: Notice that we assume here no GC happens between the
2047 // LoadNativeField and the StoreNativeField, as the GC expects a properly
2048 // updated data field (see ScavengerVisitorBase::VisitTypedDataViewPointers).
2049 body += LoadLocal(view_object);
2050 body += LoadLocal(typed_data);
2051 body += LoadNativeField(Slot::PointerBase_data(),
2052 InnerPointerAccess::kMayBeInnerPointer);
2053 body += UnboxedIntConstant(0, kUnboxedIntPtr);
2054 body += LoadLocal(unboxed_offset_in_bytes);
2055 body += CalculateElementAddress(/*index_scale=*/1);
2056 body += StoreNativeField(Slot::PointerBase_data(),
2057 InnerPointerAccess::kMayBeInnerPointer,
2058 StoreFieldInstr::Kind::kInitializing);
2059 body += DropTemporary(&unboxed_offset_in_bytes);
2060
2061 return body;
2062}
2063
2064Fragment FlowGraphBuilder::BuildTypedListGet(const Function& function,
2065 classid_t cid) {
2066 const intptr_t kNumParameters = 2;
2067 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2068 // Guaranteed to be non-null since it's only called internally from other
2069 // instance methods.
2070 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2071 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2072 LocalVariable* arg_offset_in_bytes =
2073 parsed_function_->RawParameterVariable(1);
2074
2075 Fragment body;
2076 if (CanUnboxElements(cid)) {
2077 body += LoadLocal(arg_receiver);
2078 body += LoadLocal(arg_offset_in_bytes);
2079 body += LoadIndexed(cid, /*index_scale=*/1,
2080 /*index_unboxed=*/false, kUnalignedAccess);
2081 body += Box(LoadIndexedInstr::ReturnRepresentation(cid));
2082 } else {
2083 const auto& native_function = TypedListGetNativeFunction(thread_, cid);
2084 body += LoadLocal(arg_receiver);
2085 body += LoadLocal(arg_offset_in_bytes);
2086 body += StaticCall(TokenPosition::kNoSource, native_function,
2087 kNumParameters, ICData::kNoRebind);
2088 }
2089 return body;
2090}
2091
2092static const Function& TypedListSetNativeFunction(Thread* thread,
2093 classid_t cid) {
2094 auto& state = thread->compiler_state();
2095 switch (RepresentationUtils::RepresentationOfArrayElement(cid)) {
2096 case kUnboxedFloat:
2097 return state.TypedListSetFloat32();
2098 case kUnboxedDouble:
2099 return state.TypedListSetFloat64();
2100 case kUnboxedInt32x4:
2101 return state.TypedListSetInt32x4();
2102 case kUnboxedFloat32x4:
2103 return state.TypedListSetFloat32x4();
2104 case kUnboxedFloat64x2:
2105 return state.TypedListSetFloat64x2();
2106 default:
2107 UNREACHABLE();
2108 return Object::null_function();
2109 }
2110}
2111
2112Fragment FlowGraphBuilder::BuildTypedListSet(const Function& function,
2113 classid_t cid) {
2114 const intptr_t kNumParameters = 3;
2115 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2116 // Guaranteed to be non-null since it's only called internally from other
2117 // instance methods.
2118 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2119 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2120 LocalVariable* arg_offset_in_bytes =
2121 parsed_function_->RawParameterVariable(1);
2122 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
2123
2124 Fragment body;
2125 if (CanUnboxElements(cid)) {
2126 body += LoadLocal(arg_receiver);
2127 body += LoadLocal(arg_offset_in_bytes);
2128 body += LoadLocal(arg_value);
2129 body +=
2130 CheckNullOptimized(Symbols::Value(), CheckNullInstr::kArgumentError);
2131 body += UnboxTruncate(StoreIndexedInstr::ValueRepresentation(cid));
2132 body += StoreIndexedTypedData(cid, /*index_scale=*/1,
2133 /*index_unboxed=*/false, kUnalignedAccess);
2134 body += NullConstant();
2135 } else {
2136 const auto& native_function = TypedListSetNativeFunction(thread_, cid);
2137 body += LoadLocal(arg_receiver);
2138 body += LoadLocal(arg_offset_in_bytes);
2139 body += LoadLocal(arg_value);
2140 body += StaticCall(TokenPosition::kNoSource, native_function,
2141 kNumParameters, ICData::kNoRebind);
2142 }
2143 return body;
2144}
2145
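// Editorial note: builds the IL body for the recognized typed data
// memmove-style methods: copies arg_count elements from arg_from (starting at
// arg_from_start) into arg_to (starting at arg_to_start). Small copies use
// the inline MemoryCopy instruction; large ones call memmove through a leaf
// runtime entry.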
2146Fragment FlowGraphBuilder::BuildTypedDataMemMove(const Function& function,
2147 classid_t cid) {
2148 ASSERT_EQUAL(parsed_function_->function().NumParameters(), 5);
2149 LocalVariable* arg_to = parsed_function_->RawParameterVariable(0);
2150 LocalVariable* arg_to_start = parsed_function_->RawParameterVariable(1);
2151 LocalVariable* arg_count = parsed_function_->RawParameterVariable(2);
2152 LocalVariable* arg_from = parsed_function_->RawParameterVariable(3);
2153 LocalVariable* arg_from_start = parsed_function_->RawParameterVariable(4);
2154
2155 Fragment body;
2156 // If we're copying at least this many elements, calling memmove via CCall
2157 // is faster than using the code currently emitted by MemoryCopy.
2158#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2159 // On X86, the break-even point for using CCall instead of generating a
2160 // loop via MemoryCopy() is around the same as the largest benchmark
2161 // (1048576 elements) on the machines we use.
2162 const intptr_t kCopyLengthForCCall = 1024 * 1024;
2163#else
2164 // On other architectures, when the element size is less than a word,
2165 // we copy in word-sized chunks when possible to get back some speed without
2166 // increasing the number of emitted instructions for MemoryCopy too much, but
2167 // memmove is even more aggressive, copying in 64-byte chunks when possible.
2168 // Thus, the break-even point at which a call to memmove becomes faster is
2169 // much lower for our benchmarks than for X86.
2170 const intptr_t kCopyLengthForCCall = 1024;
2171#endif
2172
2173 JoinEntryInstr* done = BuildJoinEntry();
2174 TargetEntryInstr *is_small_enough, *is_too_large;
2175 body += LoadLocal(arg_count);
2176 body += IntConstant(kCopyLengthForCCall);
2177 body += SmiRelationalOp(Token::kLT);
2178 body += BranchIfTrue(&is_small_enough, &is_too_large);
2179
2180 Fragment use_instruction(is_small_enough);
2181 use_instruction += LoadLocal(arg_from);
2182 use_instruction += LoadLocal(arg_to);
2183 use_instruction += LoadLocal(arg_from_start);
2184 use_instruction += LoadLocal(arg_to_start);
2185 use_instruction += LoadLocal(arg_count);
2186 use_instruction += MemoryCopy(cid, cid,
2187 /*unboxed_inputs=*/false, /*can_overlap=*/true);
2188 use_instruction += Goto(done);
2189
2190 Fragment call_memmove(is_too_large);
2191 const intptr_t element_size = Instance::ElementSizeFor(cid);
2192 auto* const arg_reps =
2193 new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
2194 // First unbox the arguments to avoid any boxes being inserted between unsafe
2195 // untagged loads and their uses. Also adjust the length to be in bytes, since
2196 // that's what memmove expects.
2197 call_memmove += LoadLocal(arg_to_start);
2198 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2199 LocalVariable* to_start_unboxed = MakeTemporary("to_start_unboxed");
2200 call_memmove += LoadLocal(arg_from_start);
2201 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2202 LocalVariable* from_start_unboxed = MakeTemporary("from_start_unboxed");
2203 // Used for length in bytes calculations, since memmove expects a size_t.
2204 const Representation size_rep = kUnboxedUword;
2205 call_memmove += LoadLocal(arg_count);
2206 call_memmove += UnboxTruncate(size_rep);
2207 call_memmove += UnboxedIntConstant(element_size, size_rep);
2208 call_memmove +=
2209 BinaryIntegerOp(Token::kMUL, size_rep, /*is_truncating=*/true);
2210 LocalVariable* length_in_bytes = MakeTemporary("length_in_bytes");
2211 // dest: void*
2212 call_memmove += LoadLocal(arg_to);
2213 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2214 InnerPointerAccess::kMayBeInnerPointer);
2215 call_memmove += LoadLocal(to_start_unboxed);
2216 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2217 call_memmove += CalculateElementAddress(element_size);
2218 arg_reps->Add(kUntagged);
2219 // src: const void*
2220 call_memmove += LoadLocal(arg_from);
2221 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2222 InnerPointerAccess::kMayBeInnerPointer);
2223 call_memmove += LoadLocal(from_start_unboxed);
2224 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2225 call_memmove += CalculateElementAddress(element_size);
2226 arg_reps->Add(kUntagged);
2227 // n: size_t
2228 call_memmove += LoadLocal(length_in_bytes);
2229 arg_reps->Add(size_rep);
2230 // memmove(dest, src, n)
2231 call_memmove +=
2232 CallLeafRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
2233 // The returned address is unused.
2234 call_memmove += Drop();
2235 call_memmove += DropTemporary(&length_in_bytes);
2236 call_memmove += DropTemporary(&from_start_unboxed);
2237 call_memmove += DropTemporary(&to_start_unboxed);
2238 call_memmove += Goto(done);
2239
2240 body.current = done;
2241 body += NullConstant();
2242
2243 return body;
2244}
2245
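// Editorial note: builds the IL body for a typed data factory constructor
// (e.g. the Uint8List(length) factory): the user-visible length argument is
// loaded and passed to AllocateTypedData, which performs the validity check
// itself.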
2246Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
2247 const Function& function,
2248 classid_t cid) {
2249 const auto token_pos = function.token_pos();
2250 ASSERT(
2251 Thread::Current()->isolate_group()->class_table()->HasValidClassAt(cid));
2252
2253 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
2254 LocalVariable* length = parsed_function_->RawParameterVariable(1);
2255
2256 Fragment instructions;
2257 instructions += LoadLocal(length);
2258 // AllocateTypedData instruction checks that length is valid (a non-negative
2259 // Smi below maximum allowed length).
2260 instructions += AllocateTypedData(token_pos, cid);
2261 return instructions;
2262}
2263
2264Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
2265 TokenPosition position,
2266 const Function& target) {
2267 // The function cannot be local and have parent generic functions.
2268 ASSERT(!target.HasGenericParent());
2269 ASSERT(target.IsImplicitInstanceClosureFunction());
2270
2271 Fragment fragment;
2272 fragment += Constant(target);
2273 fragment += LoadLocal(parsed_function_->receiver_var());
2274 // The function signature can have uninstantiated class type parameters.
2275 const bool has_instantiator_type_args =
2276 !target.HasInstantiatedSignature(kCurrentClass);
2277 if (has_instantiator_type_args) {
2278 fragment += LoadInstantiatorTypeArguments();
2279 }
2280 fragment += AllocateClosure(position, has_instantiator_type_args,
2281 target.IsGeneric(), /*is_tear_off=*/true);
2282
2283 return fragment;
2284}
2285
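// Editorial note: checked mode no longer exists (it was a Dart 1 feature),
// so this intentionally returns an empty Fragment and emits no check.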
2286Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
2287 const AbstractType& dst_type,
2288 const String& name_symbol) {
2289 return Fragment();
2290}
2291
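// Editorial note: the two NeedsDebugStepCheck overloads decide whether a
// DebugStepCheck is required so the debugger can pause at a source position
// while single-stepping. The first filters on the function (native or
// non-debuggable functions never need one); the second filters on the stored
// value's definition (only definitions such as constants and local loads,
// which would otherwise offer no pause point, need an explicit check).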
2292bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
2293 TokenPosition position) {
2294 return position.IsDebugPause() && !function.is_native() &&
2295 function.is_debuggable();
2296}
2297
2298bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
2299 TokenPosition position) {
2300 if (!position.IsDebugPause()) {
2301 return false;
2302 }
2303 Definition* definition = value->definition();
2304 if (definition->IsConstant() || definition->IsLoadStaticField() ||
2305 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
2306 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
2307 return true;
2308 }
2309 if (auto const alloc = definition->AsAllocateClosure()) {
2310 return !alloc->known_function().IsNull();
2311 }
2312 return false;
2313}
2314
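// Editorial note: emits a static call to AssertionError's
// assertion-evaluation helper with the condition on top of the stack as its
// single argument; assert statements use the boolean result it returns.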
2315Fragment FlowGraphBuilder::EvaluateAssertion() {
2316 const Class& klass =
2317 Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::AssertionError()));
2318 ASSERT(!klass.IsNull());
2319 const auto& error = klass.EnsureIsFinalized(H.thread());
2320 ASSERT(error == Error::null());
2321 const Function& target = Function::ZoneHandle(
2322 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::EvaluateAssertion()));
2323 ASSERT(!target.IsNull());
2324 return StaticCall(TokenPosition::kNoSource, target, /* argument_count = */ 1,
2325 ICData::kStatic);
2326}
2327
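// Editorial note: asserts that the value on top of the stack is a boolean
// while leaving the value itself on the stack: the temporary is reloaded,
// checked by AssertBool, and the checked copy dropped.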
2328Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
2329 Fragment instructions;
2330 LocalVariable* top_of_stack = MakeTemporary();
2331 instructions += LoadLocal(top_of_stack);
2332 instructions += AssertBool(position);
2333 instructions += Drop();
2334 return instructions;
2335}
2336
2337Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
2338 const String& dst_name,
2339 AssertAssignableInstr::Kind kind,
2340 TokenPosition token_pos) {
2341 Fragment instructions;
2342 if (!dst_type.IsTopTypeForSubtyping()) {
2343 LocalVariable* top_of_stack = MakeTemporary();
2344 instructions += LoadLocal(top_of_stack);
2345 instructions +=
2346 AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
2347 instructions += Drop();
2348 }
2349 return instructions;
2350}
2351
2352Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
2353 TokenPosition position,
2354 const AbstractType& dst_type,
2355 const String& dst_name,
2356 AssertAssignableInstr::Kind kind) {
2357 Fragment instructions;
2358
2359 instructions += Constant(AbstractType::ZoneHandle(dst_type.ptr()));
2360
2361 if (!dst_type.IsInstantiated(kCurrentClass)) {
2362 instructions += LoadInstantiatorTypeArguments();
2363 } else {
2364 instructions += NullConstant();
2365 }
2366
2367 if (!dst_type.IsInstantiated(kFunctions)) {
2368 instructions += LoadFunctionTypeArguments();
2369 } else {
2370 instructions += NullConstant();
2371 }
2372
2373 instructions += AssertAssignable(position, dst_name, kind);
2374
2375 return instructions;
2376}
2377
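// Editorial note: the first AssertSubtype overload pushes the two type
// argument vectors and the three constant operands; the second pops all five
// values and emits the AssertSubtypeInstr itself, so it can also be used when
// the operands are only known at runtime.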
2378Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
2379 const AbstractType& sub_type_value,
2380 const AbstractType& super_type_value,
2381 const String& dst_name_value) {
2382 Fragment instructions;
2383 instructions += LoadInstantiatorTypeArguments();
2384 instructions += LoadFunctionTypeArguments();
2385 instructions += Constant(AbstractType::ZoneHandle(Z, sub_type_value.ptr()));
2386 instructions += Constant(AbstractType::ZoneHandle(Z, super_type_value.ptr()));
2387 instructions += Constant(String::ZoneHandle(Z, dst_name_value.ptr()));
2388 instructions += AssertSubtype(position);
2389 return instructions;
2390}
2391
2392Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
2393 Fragment instructions;
2394
2395 Value* dst_name = Pop();
2396 Value* super_type = Pop();
2397 Value* sub_type = Pop();
2398 Value* function_type_args = Pop();
2399 Value* instantiator_type_args = Pop();
2400
2401 AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
2402 InstructionSource(position), instantiator_type_args, function_type_args,
2403 sub_type, super_type, dst_name, GetNextDeoptId());
2404 instructions += Fragment(instr);
2405
2406 return instructions;
2407}
2408
2409void FlowGraphBuilder::BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
2410 Fragment* implicit_checks) {
2411 const Function& dart_function = parsed_function_->function();
2412
2413 const Function* forwarding_target = nullptr;
2414 if (parsed_function_->is_forwarding_stub()) {
2415 forwarding_target = parsed_function_->forwarding_stub_super_target();
2416 ASSERT(!forwarding_target->IsNull());
2417 }
2418
2419 TypeParameters& type_parameters = TypeParameters::Handle(Z);
2420 if (dart_function.IsFactory()) {
2421 type_parameters = Class::Handle(Z, dart_function.Owner()).type_parameters();
2422 } else {
2423 type_parameters = dart_function.type_parameters();
2424 }
2425 const intptr_t num_type_params = type_parameters.Length();
2426 if (num_type_params == 0) return;
2427 if (forwarding_target != nullptr) {
2428 type_parameters = forwarding_target->type_parameters();
2429 ASSERT(type_parameters.Length() == num_type_params);
2430 }
2431 if (type_parameters.AllDynamicBounds()) {
2432 return; // All bounds are dynamic.
2433 }
2434 TypeParameter& type_param = TypeParameter::Handle(Z);
2435 String& name = String::Handle(Z);
2436 AbstractType& bound = AbstractType::Handle(Z);
2437 Fragment check_bounds;
2438 for (intptr_t i = 0; i < num_type_params; ++i) {
2439 bound = type_parameters.BoundAt(i);
2440 if (bound.IsTopTypeForSubtyping()) {
2441 continue;
2442 }
2443
2444 switch (mode) {
2445 case TypeChecksToBuild::kCheckAllTypeParameterBounds:
2446 break;
2447 case TypeChecksToBuild::kCheckCovariantTypeParameterBounds:
2448 if (!type_parameters.IsGenericCovariantImplAt(i)) {
2449 continue;
2450 }
2451 break;
2452 case TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds:
2453 if (type_parameters.IsGenericCovariantImplAt(i)) {
2454 continue;
2455 }
2456 break;
2457 }
2458
2459 name = type_parameters.NameAt(i);
2460
2461 if (forwarding_target != nullptr) {
2462 type_param = forwarding_target->TypeParameterAt(i);
2463 } else if (dart_function.IsFactory()) {
2464 type_param = Class::Handle(Z, dart_function.Owner()).TypeParameterAt(i);
2465 } else {
2466 type_param = dart_function.TypeParameterAt(i);
2467 }
2468 ASSERT(type_param.IsFinalized());
2469 check_bounds +=
2470 AssertSubtype(TokenPosition::kNoSource, type_param, bound, name);
2471 }
2472
2473 // Type arguments passed through partial instantiation are guaranteed to be
2474 // bounds-checked at the point of partial instantiation, so we don't need to
2475 // check them again at the call-site.
2476 if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
2477 FLAG_eliminate_type_checks) {
2478 LocalVariable* closure = parsed_function_->ParameterVariable(0);
2479 *implicit_checks += TestDelayedTypeArgs(closure, /*present=*/{},
2480 /*absent=*/check_bounds);
2481 } else {
2482 *implicit_checks += check_bounds;
2483 }
2484}
2485
2486void FlowGraphBuilder::BuildArgumentTypeChecks(
2487 Fragment* explicit_checks,
2488 Fragment* implicit_checks,
2489 Fragment* implicit_redefinitions) {
2490 const Function& dart_function = parsed_function_->function();
2491
2492 const Function* forwarding_target = nullptr;
2493 if (parsed_function_->is_forwarding_stub()) {
2494 forwarding_target = parsed_function_->forwarding_stub_super_target();
2495 ASSERT(!forwarding_target->IsNull());
2496 }
2497
2498 const intptr_t num_params = dart_function.NumParameters();
2499 for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
2500 ++i) {
2501 LocalVariable* param = parsed_function_->ParameterVariable(i);
2502 const String& name = param->name();
2503 if (!param->needs_type_check()) {
2504 continue;
2505 }
2506 if (param->is_captured()) {
2507 param = parsed_function_->RawParameterVariable(i);
2508 }
2509
2510 const AbstractType* target_type = &param->static_type();
2511 if (forwarding_target != nullptr) {
2512 // We add 1 to the parameter index to account for the receiver.
2513 target_type =
2514 &AbstractType::ZoneHandle(Z, forwarding_target->ParameterTypeAt(i));
2515 }
2516
2517 if (target_type->IsTopTypeForSubtyping()) continue;
2518
2519 const bool is_covariant = param->is_explicit_covariant_parameter();
2520 Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
2521
2522 *checks += LoadLocal(param);
2523 *checks += AssertAssignableLoadTypeArguments(
2524 param->token_pos(), *target_type, name,
2525 AssertAssignableInstr::kParameterCheck);
2526 *checks += StoreLocal(param);
2527 *checks += Drop();
2528
2529 if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
2530 // We generate slightly different code in optimized vs. unoptimized mode,
2531 // which is ok since we don't allocate any deopt ids.
2532 AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
2533
2534 *implicit_redefinitions += LoadLocal(param);
2535 *implicit_redefinitions += RedefinitionWithType(*target_type);
2536 *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
2537 *implicit_redefinitions += Drop();
2538 }
2539 }
2540}
2541
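// Editorial note: delegates prologue emission (parameter handling and related
// entry bookkeeping) to kernel::PrologueBuilder, then re-syncs the block id
// counter since the prologue builder allocates blocks of its own.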
2542BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2543 PrologueInfo* prologue_info) {
2544 const bool compiling_for_osr = IsCompiledForOsr();
2545
2546 kernel::PrologueBuilder prologue_builder(
2547 parsed_function_, last_used_block_id_, compiling_for_osr, IsInlining());
2548 BlockEntryInstr* instruction_cursor =
2549 prologue_builder.BuildPrologue(normal_entry, prologue_info);
2550
2551 last_used_block_id_ = prologue_builder.last_used_block_id();
2552
2553 return instruction_cursor;
2554}
2555
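// Editorial note: collects the names of a function's optional named
// parameters into a new array, in declaration order, or returns null if it
// has none.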
2556ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
2557 if (!function.HasOptionalNamedParameters()) {
2558 return Array::null();
2559 }
2560
2561 const intptr_t num_fixed_params = function.num_fixed_parameters();
2562 const intptr_t num_opt_params = function.NumOptionalNamedParameters();
2563 const auto& names = Array::Handle(Z, Array::New(num_opt_params, Heap::kOld));
2564 auto& name = String::Handle(Z);
2565 for (intptr_t i = 0; i < num_opt_params; ++i) {
2566 name = function.ParameterNameAt(num_fixed_params + i);
2567 names.SetAt(i, name);
2568 }
2569 return names.ptr();
2570}
2571
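// Editorial note: pushes all explicit (user-declared) parameters onto the
// stack. If a target with unboxed parameters is provided, integer and double
// parameters are unboxed on the way so the pushed values match the target's
// expected representations.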
2572Fragment FlowGraphBuilder::PushExplicitParameters(
2573 const Function& function,
2574 const Function& target /* = Function::null_function()*/) {
2575 Fragment instructions;
2576 for (intptr_t i = function.NumImplicitParameters(),
2577 n = function.NumParameters();
2578 i < n; ++i) {
2579 Fragment push_param = LoadLocal(parsed_function_->ParameterVariable(i));
2580 if (!target.IsNull() && target.is_unboxed_parameter_at(i)) {
2581 Representation to;
2582 if (target.is_unboxed_integer_parameter_at(i)) {
2583 to = kUnboxedInt64;
2584 } else {
2585 ASSERT(target.is_unboxed_double_parameter_at(i));
2586 to = kUnboxedDouble;
2587 }
2588 const auto unbox = UnboxInstr::Create(to, Pop(), DeoptId::kNone,
2589 Instruction::kNotSpeculative);
2590 Push(unbox);
2591 push_param += Fragment(unbox);
2592 }
2593 instructions += push_param;
2594 }
2595 return instructions;
2596}
2597
2598FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
2599 const Function& method) {
2600 // A method extractor is the implicit getter for a method.
2601 const Function& function =
2602 Function::ZoneHandle(Z, method.extracted_method_closure());
2603
2604 graph_entry_ =
2605 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2606
2607 auto normal_entry = BuildFunctionEntry(graph_entry_);
2608 graph_entry_->set_normal_entry(normal_entry);
2609
2610 Fragment body(normal_entry);
2611 body += CheckStackOverflowInPrologue(method.token_pos());
2612 body += BuildImplicitClosureCreation(TokenPosition::kNoSource, function);
2613 body += Return(TokenPosition::kNoSource);
2614
2615 // There is no prologue code for a method extractor.
2616 PrologueInfo prologue_info(-1, -1);
2617 return new (Z)
2618 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2619 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2620}
2621
2622FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
2623 const Function& function) {
2624 // This function is specialized for a receiver class, a method name, and
2625 // the arguments descriptor at a call site.
2626 const ArgumentsDescriptor descriptor(saved_args_desc_array());
2627
2628 graph_entry_ =
2629 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2630
2631 auto normal_entry = BuildFunctionEntry(graph_entry_);
2632 graph_entry_->set_normal_entry(normal_entry);
2633
2634 PrologueInfo prologue_info(-1, -1);
2635 BlockEntryInstr* instruction_cursor =
2636 BuildPrologue(normal_entry, &prologue_info);
2637
2638 Fragment body(instruction_cursor);
2639 body += CheckStackOverflowInPrologue(function.token_pos());
2640
2641 // The receiver is the first argument to noSuchMethod, and it is the first
2642 // argument passed to the dispatcher function.
2643 body += LoadLocal(parsed_function_->ParameterVariable(0));
2644
2645 // The second argument to noSuchMethod is an invocation mirror. Push the
2646 // arguments for allocating the invocation mirror. First, the name.
2647 body += Constant(String::ZoneHandle(Z, function.name()));
2648
2649 // Second, the arguments descriptor.
2650 body += Constant(saved_args_desc_array());
2651
2652 // Third, an array containing the original arguments. Create it and fill
2653 // it in.
2654 const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
2655 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
2656 body += IntConstant(receiver_index + descriptor.Size());
2657 body += CreateArray();
2658 LocalVariable* array = MakeTemporary();
2659 if (receiver_index > 0) {
2660 LocalVariable* type_args = parsed_function_->function_type_arguments();
2661 ASSERT(type_args != nullptr);
2662 body += LoadLocal(array);
2663 body += IntConstant(0);
2664 body += LoadLocal(type_args);
2665 body += StoreIndexed(kArrayCid);
2666 }
2667 for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
2668 body += LoadLocal(array);
2669 body += IntConstant(receiver_index + i);
2670 body += LoadLocal(parsed_function_->ParameterVariable(i));
2671 body += StoreIndexed(kArrayCid);
2672 }
2673 String& name = String::Handle(Z);
2674 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
2675 const intptr_t parameter_index = descriptor.PositionAt(i);
2676 name = descriptor.NameAt(i);
2677 name = Symbols::New(H.thread(), name);
2678 body += LoadLocal(array);
2679 body += IntConstant(receiver_index + parameter_index);
2680 body += LoadLocal(parsed_function_->ParameterVariable(parameter_index));
2681 body += StoreIndexed(kArrayCid);
2682 }
2683
2684 // Fourth, false indicating this is not a super NoSuchMethod.
2685 body += Constant(Bool::False());
2686
2687 const Class& mirror_class =
2688 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
2689 ASSERT(!mirror_class.IsNull());
2690 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
2691 ASSERT(error == Error::null());
2692 const Function& allocation_function = Function::ZoneHandle(
2693 Z, mirror_class.LookupStaticFunction(
2694 Library::PrivateCoreLibName(Symbols::AllocateInvocationMirror())));
2695 ASSERT(!allocation_function.IsNull());
2696 body += StaticCall(TokenPosition::kMinSource, allocation_function,
2697 /* argument_count = */ 4, ICData::kStatic);
2698
2699 const int kTypeArgsLen = 0;
2700 ArgumentsDescriptor two_arguments(
2701 Array::Handle(Z, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, 2)));
2702 Function& no_such_method =
2703 Function::ZoneHandle(Z, Resolver::ResolveDynamicForReceiverClass(
2704 Class::Handle(Z, function.Owner()),
2705 Symbols::NoSuchMethod(), two_arguments));
2706 if (no_such_method.IsNull()) {
2707 // If noSuchMethod is not found on the receiver class, call
2708 // Object.noSuchMethod.
2709 no_such_method = Resolver::ResolveDynamicForReceiverClass(
2710 Class::Handle(Z, IG->object_store()->object_class()),
2711 Symbols::NoSuchMethod(), two_arguments);
2712 }
2713 body += StaticCall(TokenPosition::kMinSource, no_such_method,
2714 /* argument_count = */ 2, ICData::kNSMDispatch);
2715 body += Return(TokenPosition::kNoSource);
2716
2717 return new (Z)
2718 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2719 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2720}
2721
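// Editorial note: builds a dynamic record field getter. The getter name is
// first interpreted as a positional field (e.g. a name like $1), then
// searched for among the record shape's field names; if neither resolves,
// a NoSuchMethodError is thrown.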
2722FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2723 const Function& function) {
2724 graph_entry_ =
2725 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2726
2727 auto normal_entry = BuildFunctionEntry(graph_entry_);
2728 graph_entry_->set_normal_entry(normal_entry);
2729
2730 JoinEntryInstr* nsm = BuildJoinEntry();
2731 JoinEntryInstr* done = BuildJoinEntry();
2732
2733 Fragment body(normal_entry);
2734 body += CheckStackOverflowInPrologue(function.token_pos());
2735
2736 String& name = String::ZoneHandle(Z, function.name());
2737 ASSERT(Field::IsGetterName(name));
2738 name = Field::NameFromGetter(name);
2739
2740 // Get an array of field names.
2741 const Class& cls = Class::Handle(Z, IG->class_table()->At(kRecordCid));
2742 const auto& error = cls.EnsureIsFinalized(thread_);
2743 ASSERT(error == Error::null());
2744 const Function& get_field_names_function = Function::ZoneHandle(
2745 Z, cls.LookupFunctionAllowPrivate(Symbols::Get_fieldNames()));
2746 ASSERT(!get_field_names_function.IsNull());
2747 body += LoadLocal(parsed_function_->receiver_var());
2748 body += StaticCall(TokenPosition::kNoSource, get_field_names_function, 1,
2749 ICData::kNoRebind);
2750 LocalVariable* field_names = MakeTemporary("field_names");
2751
2752 body += LoadLocal(field_names);
2753 body += LoadNativeField(Slot::Array_length());
2754 LocalVariable* num_named = MakeTemporary("num_named");
2755
2756 // num_positional = num_fields - field_names.length
2757 body += LoadLocal(parsed_function_->receiver_var());
2758 body += LoadNativeField(Slot::Record_shape());
2759 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
2760 body += SmiBinaryOp(Token::kBIT_AND);
2761 body += LoadLocal(num_named);
2762 body += SmiBinaryOp(Token::kSUB);
2763 LocalVariable* num_positional = MakeTemporary("num_positional");
2764
2765 const intptr_t field_index =
2766 Record::GetPositionalFieldIndexFromFieldName(name);
2767 if (field_index >= 0) {
2768 // Get positional record field by index.
2769 body += IntConstant(field_index);
2770 body += LoadLocal(num_positional);
2771 body += SmiRelationalOp(Token::kLT);
2772 TargetEntryInstr* valid_index;
2773 TargetEntryInstr* invalid_index;
2774 body += BranchIfTrue(&valid_index, &invalid_index);
2775
2776 body.current = valid_index;
2777 body += LoadLocal(parsed_function_->receiver_var());
2778 body += LoadNativeField(Slot::GetRecordFieldSlot(
2779 thread_, compiler::target::Record::field_offset(field_index)));
2780
2781 body += StoreLocal(TokenPosition::kNoSource,
2782 parsed_function_->expression_temp_var());
2783 body += Drop();
2784 body += Goto(done);
2785
2786 body.current = invalid_index;
2787 }
2788
2789 // Search field among named fields.
2790 body += IntConstant(0);
2791 body += LoadLocal(num_named);
2792 body += SmiRelationalOp(Token::kLT);
2793 TargetEntryInstr* has_named_fields;
2794 TargetEntryInstr* no_named_fields;
2795 body += BranchIfTrue(&has_named_fields, &no_named_fields);
2796
2797 Fragment(no_named_fields) + Goto(nsm);
2798 body.current = has_named_fields;
2799
2800 LocalVariable* index = parsed_function_->expression_temp_var();
2801 body += IntConstant(0);
2802 body += StoreLocal(TokenPosition::kNoSource, index);
2803 body += Drop();
2804
2805 JoinEntryInstr* loop = BuildJoinEntry();
2806 body += Goto(loop);
2807 body.current = loop;
2808
2809 body += LoadLocal(field_names);
2810 body += LoadLocal(index);
2811 body += LoadIndexed(kArrayCid,
2812 /*index_scale*/ compiler::target::kCompressedWordSize);
2813 body += Constant(name);
2814 TargetEntryInstr* found;
2815 TargetEntryInstr* continue_search;
2816 body += BranchIfEqual(&found, &continue_search);
2817
2818 body.current = continue_search;
2819 body += LoadLocal(index);
2820 body += IntConstant(1);
2821 body += SmiBinaryOp(Token::kADD);
2822 body += StoreLocal(TokenPosition::kNoSource, index);
2823 body += Drop();
2824
2825 body += LoadLocal(index);
2826 body += LoadLocal(num_named);
2827 body += SmiRelationalOp(Token::kLT);
2828 TargetEntryInstr* has_more_fields;
2829 TargetEntryInstr* no_more_fields;
2830 body += BranchIfTrue(&has_more_fields, &no_more_fields);
2831
2832 Fragment(has_more_fields) + Goto(loop);
2833 Fragment(no_more_fields) + Goto(nsm);
2834
2835 body.current = found;
2836
2837 body += LoadLocal(parsed_function_->receiver_var());
2838
2839 body += LoadLocal(num_positional);
2840 body += LoadLocal(index);
2841 body += SmiBinaryOp(Token::kADD);
2842
2843 body += LoadIndexed(kRecordCid,
2844 /*index_scale*/ compiler::target::kCompressedWordSize);
2845
2846 body += StoreLocal(TokenPosition::kNoSource,
2847 parsed_function_->expression_temp_var());
2848 body += Drop();
2849 body += Goto(done);
2850
2851 body.current = done;
2852
2853 body += LoadLocal(parsed_function_->expression_temp_var());
2854 body += DropTempsPreserveTop(3); // field_names, num_named, num_positional
2855 body += Return(TokenPosition::kNoSource);
2856
2857 Fragment throw_nsm(nsm);
2858 throw_nsm += LoadLocal(parsed_function_->receiver_var());
2859 throw_nsm += ThrowNoSuchMethodError(TokenPosition::kNoSource, function,
2860 /*incompatible_arguments=*/false,
2861 /*receiver_pushed=*/true);
2862 throw_nsm += ThrowException(TokenPosition::kNoSource); // Close graph.
2863
2864 // There is no prologue code for a record field getter.
2865 PrologueInfo prologue_info(-1, -1);
2866 return new (Z)
2867 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2868 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2869}
2870
2871// Information used by the various dynamic closure call fragment builders.
2872struct FlowGraphBuilder::ClosureCallInfo {
2873 ClosureCallInfo(LocalVariable* closure,
2874 JoinEntryInstr* throw_no_such_method,
2875 const Array& arguments_descriptor_array,
2876 ParsedFunction::DynamicClosureCallVars* const vars)
2877 : closure(ASSERT_NOTNULL(closure)),
2878 throw_no_such_method(ASSERT_NOTNULL(throw_no_such_method)),
2879 descriptor(arguments_descriptor_array),
2880 vars(ASSERT_NOTNULL(vars)) {}
2881
2882 LocalVariable* const closure;
2883 JoinEntryInstr* const throw_no_such_method;
2884 const ArgumentsDescriptor descriptor;
2885 ParsedFunction::DynamicClosureCallVars* const vars;
2886
2887 // Set up by BuildClosureCallDefaultTypeHandling() when needed. These values
2888 // are read-only, so they don't need real local variables and are created
2889 // using MakeTemporary().
2890 LocalVariable* signature = nullptr;
2891 LocalVariable* num_fixed_params = nullptr;
2892 LocalVariable* num_opt_params = nullptr;
2893 LocalVariable* num_max_params = nullptr;
2894 LocalVariable* has_named_params = nullptr;
2895 LocalVariable* named_parameter_names = nullptr;
2896 LocalVariable* parameter_types = nullptr;
2897 LocalVariable* type_parameters = nullptr;
2898 LocalVariable* num_type_parameters = nullptr;
2899 LocalVariable* type_parameter_flags = nullptr;
2900 LocalVariable* instantiator_type_args = nullptr;
2901 LocalVariable* parent_function_type_args = nullptr;
2902 LocalVariable* num_parent_type_args = nullptr;
2903};
2904
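// Editorial note: branches on whether the callee is generic, i.e. on whether
// the cached type_parameters value is non-null, splicing the given fragments
// into the corresponding arms before rejoining.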
2905Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2906 const ClosureCallInfo& info,
2907 Fragment generic,
2908 Fragment not_generic) {
2909 JoinEntryInstr* after_branch = BuildJoinEntry();
2910
2911 Fragment check;
2912 check += LoadLocal(info.type_parameters);
2913 TargetEntryInstr* is_not_generic;
2914 TargetEntryInstr* is_generic;
2915 check += BranchIfNull(&is_not_generic, &is_generic);
2916
2917 generic.Prepend(is_generic);
2918 generic += Goto(after_branch);
2919
2920 not_generic.Prepend(is_not_generic);
2921 not_generic += Goto(after_branch);
2922
2923 return Fragment(check.entry, after_branch);
2924}
2925
2926Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2927 const ClosureCallInfo& info,
2928 Fragment set,
2929 Fragment not_set) {
2930 Fragment check_required;
2931 // We calculate the index to dereference in the parameter names array.
2932 check_required += LoadLocal(info.vars->current_param_index);
2933 check_required +=
2934 IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
2935 check_required += SmiBinaryOp(Token::kSHR);
2936 check_required += LoadLocal(info.num_opt_params);
2937 check_required += SmiBinaryOp(Token::kADD);
2938 LocalVariable* flags_index = MakeTemporary("flags_index"); // Read-only.
2939
2940 // One read-only stack value (flags_index) that must be dropped
2941 // after we rejoin at after_check.
2942 JoinEntryInstr* after_check = BuildJoinEntry();
2943
2944 // Now we check to see if the flags index is within the bounds of the
2945 // parameters names array. If not, it cannot be required.
2946 check_required += LoadLocal(flags_index);
2947 check_required += LoadLocal(info.named_parameter_names);
2948 check_required += LoadNativeField(Slot::Array_length());
2949 check_required += SmiRelationalOp(Token::kLT);
2950 TargetEntryInstr* valid_index;
2951 TargetEntryInstr* invalid_index;
2952 check_required += BranchIfTrue(&valid_index, &invalid_index);
2953
2954 JoinEntryInstr* join_not_set = BuildJoinEntry();
2955
2956 Fragment(invalid_index) + Goto(join_not_set);
2957
2958 // Otherwise, we need to retrieve the value. We're guaranteed the Smis in
2959 // the flag slots are non-null, so after loading we can immediately check
2960 // the required flag bit for the given named parameter.
2961 check_required.current = valid_index;
2962 check_required += LoadLocal(info.named_parameter_names);
2963 check_required += LoadLocal(flags_index);
2964 check_required += LoadIndexed(
2965 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2966 check_required += LoadLocal(info.vars->current_param_index);
2967 check_required +=
2968 IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
2969 check_required += SmiBinaryOp(Token::kBIT_AND);
2970 // If the below changes, we'll need to multiply by the number of parameter
2971 // flags before shifting.
2972 static_assert(compiler::target::kNumParameterFlags == 1,
2973 "IL builder assumes only one flag bit per parameter");
2974 check_required += SmiBinaryOp(Token::kSHR);
2975 check_required +=
2976 IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
2977 check_required += SmiBinaryOp(Token::kBIT_AND);
2978 check_required += IntConstant(0);
2979 TargetEntryInstr* is_not_set;
2980 TargetEntryInstr* is_set;
2981 check_required += BranchIfEqual(&is_not_set, &is_set);
2982
2983 Fragment(is_not_set) + Goto(join_not_set);
2984
2985 set.Prepend(is_set);
2986 set += Goto(after_check);
2987
2988 not_set.Prepend(join_not_set);
2989 not_set += Goto(after_check);
2990
2991 // After rejoining, drop the introduced temporary (flags_index).
2992 check_required.current = after_check;
2993 check_required += DropTemporary(&flags_index);
2994 return check_required;
2995}
2996
2997Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2998 const ClosureCallInfo& info) {
2999 if (info.descriptor.TypeArgsLen() > 0) {
3000 ASSERT(parsed_function_->function_type_arguments() != nullptr);
3001 // A TAV was provided, so we don't need default type argument handling
3002 // and can just take the arguments we were given.
3003 Fragment store_provided;
3004 store_provided += LoadLocal(parsed_function_->function_type_arguments());
3005 store_provided += StoreLocal(info.vars->function_type_args);
3006 store_provided += Drop();
3007 return store_provided;
3008 }
3009
3010 // Load the defaults, instantiating or replacing them with the other type
3011 // arguments as appropriate.
3012 Fragment store_default;
3013 store_default += LoadLocal(info.closure);
3014 store_default += LoadNativeField(Slot::Closure_function());
3015 store_default += LoadNativeField(Slot::Function_data());
3016 LocalVariable* closure_data = MakeTemporary("closure_data");
3017
3018 store_default += LoadLocal(closure_data);
3019 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
3020 ClosureData::PackedInstantiationMode>(Slot::ClosureData_packed_fields());
3021 LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
3022
3023 // Two locals to drop after join, closure_data and default_tav_kind.
3024 JoinEntryInstr* done = BuildJoinEntry();
3025
3026 store_default += LoadLocal(default_tav_kind);
3027 TargetEntryInstr* is_instantiated;
3028 TargetEntryInstr* is_not_instantiated;
3029 store_default +=
3030 IntConstant(static_cast<intptr_t>(InstantiationMode::kIsInstantiated));
3031 store_default += BranchIfEqual(&is_instantiated, &is_not_instantiated);
3032 store_default.current = is_not_instantiated; // Check next case.
3033 store_default += LoadLocal(default_tav_kind);
3034 TargetEntryInstr* needs_instantiation;
3035 TargetEntryInstr* can_share;
3036 store_default += IntConstant(
3037 static_cast<intptr_t>(InstantiationMode::kNeedsInstantiation));
3038 store_default += BranchIfEqual(&needs_instantiation, &can_share);
3039 store_default.current = can_share; // Check next case.
3040 store_default += LoadLocal(default_tav_kind);
3041 TargetEntryInstr* can_share_instantiator;
3042 TargetEntryInstr* can_share_function;
3043 store_default += IntConstant(static_cast<intptr_t>(
3044 InstantiationMode::kSharesInstantiatorTypeArguments));
3045 store_default += BranchIfEqual(&can_share_instantiator, &can_share_function);
3046
3047 Fragment instantiated(is_instantiated);
3048 instantiated += LoadLocal(info.type_parameters);
3049 instantiated += LoadNativeField(Slot::TypeParameters_defaults());
3050 instantiated += StoreLocal(info.vars->function_type_args);
3051 instantiated += Drop();
3052 instantiated += Goto(done);
3053
3054 Fragment do_instantiation(needs_instantiation);
3055 // Load the instantiator type arguments.
3056 do_instantiation += LoadLocal(info.instantiator_type_args);
3057 // Load the parent function type arguments. (No local function type arguments
3058 // can be used within the defaults).
3059 do_instantiation += LoadLocal(info.parent_function_type_args);
3060 // Load the default type arguments to instantiate.
3061 do_instantiation += LoadLocal(info.type_parameters);
3062 do_instantiation += LoadNativeField(Slot::TypeParameters_defaults());
3063 do_instantiation += InstantiateDynamicTypeArguments();
3064 do_instantiation += StoreLocal(info.vars->function_type_args);
3065 do_instantiation += Drop();
3066 do_instantiation += Goto(done);
3067
3068 Fragment share_instantiator(can_share_instantiator);
3069 share_instantiator += LoadLocal(info.instantiator_type_args);
3070 share_instantiator += StoreLocal(info.vars->function_type_args);
3071 share_instantiator += Drop();
3072 share_instantiator += Goto(done);
3073
3074 Fragment share_function(can_share_function);
3075 // Since the defaults won't reference local type parameters, they can only
3076 // use the parent function type arguments, so that vector is used as-is.
3077 share_function += LoadLocal(info.parent_function_type_args);
3078 share_function += StoreLocal(info.vars->function_type_args);
3079 share_function += Drop();
3080 share_function += Goto(done);
3081
3082 store_default.current = done; // Return here after branching.
3083 store_default += DropTemporary(&default_tav_kind);
3084 store_default += DropTemporary(&closure_data);
3085
3086 Fragment store_delayed;
3087 store_delayed += LoadLocal(info.closure);
3088 store_delayed += LoadNativeField(Slot::Closure_delayed_type_arguments());
3089 store_delayed += StoreLocal(info.vars->function_type_args);
3090 store_delayed += Drop();
3091
3092 // Use the delayed type args if present, else the default ones.
3093 return TestDelayedTypeArgs(info.closure, store_delayed, store_default);
3094}
3095
3096Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
3097 const ClosureCallInfo& info) {
3098 // When no named arguments are provided, we just need to check for possible
3099 // required named arguments.
3100 if (info.descriptor.NamedCount() == 0) {
3101 // If the below changes, we can no longer assume that flag slots existing
3102 // means there are required parameters.
3103 static_assert(compiler::target::kNumParameterFlags == 1,
3104 "IL builder assumes only one flag bit per parameter");
3105 // No named args were provided, so check for any required named params.
3106 // Here, we assume that the only parameter flag saved is the required bit
3107 // for named parameters. If this changes, we'll need to check each flag
3108 // entry appropriately for any set required bits.
3109 Fragment has_any;
3110 has_any += LoadLocal(info.num_opt_params);
3111 has_any += LoadLocal(info.named_parameter_names);
3112 has_any += LoadNativeField(Slot::Array_length());
3113 TargetEntryInstr* no_required;
3114 TargetEntryInstr* has_required;
3115 has_any += BranchIfEqual(&no_required, &has_required);
3116
3117 Fragment(has_required) + Goto(info.throw_no_such_method);
3118
3119 return Fragment(has_any.entry, no_required);
3120 }
3121
3122 // Otherwise, we need to loop through the parameter names to check the names
3123 // of named arguments for validity (and possibly missing required ones).
3124 Fragment check_names;
3125 check_names += LoadLocal(info.vars->current_param_index);
3126 LocalVariable* old_index = MakeTemporary("old_index"); // Read-only.
3127 check_names += LoadLocal(info.vars->current_num_processed);
3128 LocalVariable* old_processed = MakeTemporary("old_processed"); // Read-only.
3129
3130 // Two local stack values (old_index, old_processed) to drop after rejoining
3131 // at done.
3132 JoinEntryInstr* loop = BuildJoinEntry();
3133 JoinEntryInstr* done = BuildJoinEntry();
3134
3135 check_names += IntConstant(0);
3136 check_names += StoreLocal(info.vars->current_num_processed);
3137 check_names += Drop();
3138 check_names += IntConstant(0);
3139 check_names += StoreLocal(info.vars->current_param_index);
3140 check_names += Drop();
3141 check_names += Goto(loop);
3142
3143 Fragment loop_check(loop);
3144 loop_check += LoadLocal(info.vars->current_param_index);
3145 loop_check += LoadLocal(info.num_opt_params);
3146 loop_check += SmiRelationalOp(Token::kLT);
3147 TargetEntryInstr* no_more;
3148 TargetEntryInstr* more;
3149 loop_check += BranchIfTrue(&more, &no_more);
3150
3151 Fragment(no_more) + Goto(done);
3152
3153 Fragment loop_body(more);
3154 // First load the name we need to check against.
3155 loop_body += LoadLocal(info.named_parameter_names);
3156 loop_body += LoadLocal(info.vars->current_param_index);
3157 loop_body += LoadIndexed(
3158 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3159 LocalVariable* param_name = MakeTemporary("param_name"); // Read only.
3160
3161 // One additional local value on the stack within the loop body (param_name)
3162 // that should be dropped after rejoining at loop_incr.
3163 JoinEntryInstr* loop_incr = BuildJoinEntry();
3164
3165 // Now iterate over the ArgumentsDescriptor names and check for a match.
3166 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3167 const auto& name = String::ZoneHandle(Z, info.descriptor.NameAt(i));
3168 loop_body += Constant(name);
3169 loop_body += LoadLocal(param_name);
3170 TargetEntryInstr* match;
3171 TargetEntryInstr* mismatch;
3172 loop_body += BranchIfEqual(&match, &mismatch);
3173 loop_body.current = mismatch;
3174
3175 // We have a match, so go to the next name after storing the corresponding
3176 // parameter index on the stack and incrementing the number of matched
3177 // arguments. (No need to check the required bit for provided parameters.)
3178 Fragment matched(match);
3179 matched += LoadLocal(info.vars->current_param_index);
3180 matched += LoadLocal(info.num_fixed_params);
3181 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3182 matched += StoreLocal(info.vars->named_argument_parameter_indices.At(i));
3183 matched += Drop();
3184 matched += LoadLocal(info.vars->current_num_processed);
3185 matched += IntConstant(1);
3186 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3187 matched += StoreLocal(info.vars->current_num_processed);
3188 matched += Drop();
3189 matched += Goto(loop_incr);
3190 }
3191
3192 // None of the names in the arguments descriptor matched, so check if this
3193 // is a required parameter.
3194 loop_body += TestClosureFunctionNamedParameterRequired(
3195 info,
3196 /*set=*/Goto(info.throw_no_such_method),
3197 /*not_set=*/{});
3198
3199 loop_body += Goto(loop_incr);
3200
3201 Fragment incr_index(loop_incr);
3202 incr_index += DropTemporary(&param_name);
3203 incr_index += LoadLocal(info.vars->current_param_index);
3204 incr_index += IntConstant(1);
3205 incr_index += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3206 incr_index += StoreLocal(info.vars->current_param_index);
3207 incr_index += Drop();
3208 incr_index += Goto(loop);
3209
3210 Fragment check_processed(done);
3211 check_processed += LoadLocal(info.vars->current_num_processed);
3212 check_processed += IntConstant(info.descriptor.NamedCount());
3213 TargetEntryInstr* all_processed;
3214 TargetEntryInstr* bad_name;
3215 check_processed += BranchIfEqual(&all_processed, &bad_name);
3216
3217 // Didn't find a matching parameter name for at least one argument name.
3218 Fragment(bad_name) + Goto(info.throw_no_such_method);
3219
3220 // Drop the temporaries at the end of the fragment.
3221 check_names.current = all_processed;
3222 check_names += LoadLocal(old_processed);
3223 check_names += StoreLocal(info.vars->current_num_processed);
3224 check_names += Drop();
3225 check_names += DropTemporary(&old_processed);
3226 check_names += LoadLocal(old_index);
3227 check_names += StoreLocal(info.vars->current_param_index);
3228 check_names += Drop();
3229 check_names += DropTemporary(&old_index);
3230 return check_names;
3231}
3232
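// Editorial note: checks that the shape of the call, that is, the counts of
// type arguments and of positional and named arguments, is compatible with
// the closure's signature, branching to throw_no_such_method on any mismatch.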
3233Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
3234 const ClosureCallInfo& info) {
3235 Fragment check_entry;
3236 // We only need to check the length of any explicitly provided type arguments.
3237 if (info.descriptor.TypeArgsLen() > 0) {
3238 Fragment check_type_args_length;
3239 check_type_args_length += LoadLocal(info.type_parameters);
3240 TargetEntryInstr* null;
3241 TargetEntryInstr* not_null;
3242 check_type_args_length += BranchIfNull(&null, &not_null);
3243 check_type_args_length.current = not_null; // Continue in non-error case.
3244 check_type_args_length += LoadLocal(info.signature);
3245 check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
3246 UntaggedFunctionType::PackedNumTypeParameters>(
3247 Slot::FunctionType_packed_type_parameter_counts());
3248 check_type_args_length += IntConstant(info.descriptor.TypeArgsLen());
3249 TargetEntryInstr* equal;
3250 TargetEntryInstr* not_equal;
3251 check_type_args_length += BranchIfEqual(&equal, &not_equal);
3252 check_type_args_length.current = equal; // Continue in non-error case.
3253
3254 // The function is not generic.
3255 Fragment(null) + Goto(info.throw_no_such_method);
3256
3257 // An incorrect number of type arguments were passed.
3258 Fragment(not_equal) + Goto(info.throw_no_such_method);
3259
3260 // Type arguments should not be provided if there are delayed type
3261 // arguments, as then the closure itself is not generic.
3262 check_entry += TestDelayedTypeArgs(
3263 info.closure, /*present=*/Goto(info.throw_no_such_method),
3264 /*absent=*/check_type_args_length);
3265 }
3266
3267 check_entry += LoadLocal(info.has_named_params);
3268 TargetEntryInstr* has_named;
3269 TargetEntryInstr* has_positional;
3270 check_entry += BranchIfTrue(&has_named, &has_positional);
3271 JoinEntryInstr* join_after_optional = BuildJoinEntry();
3272 check_entry.current = join_after_optional;
3273
3274 if (info.descriptor.NamedCount() > 0) {
3275 // Named args were passed, but this function takes none, so just throw.
3276 Fragment(has_positional) + Goto(info.throw_no_such_method);
3277 } else {
3278 Fragment check_pos(has_positional);
3279 check_pos += LoadLocal(info.num_fixed_params);
3280 check_pos += IntConstant(info.descriptor.PositionalCount());
3281 check_pos += SmiRelationalOp(Token::kLTE);
3282 TargetEntryInstr* enough;
3283 TargetEntryInstr* too_few;
3284 check_pos += BranchIfTrue(&enough, &too_few);
3285 check_pos.current = enough;
3286
3287 Fragment(too_few) + Goto(info.throw_no_such_method);
3288
3289 check_pos += IntConstant(info.descriptor.PositionalCount());
3290 check_pos += LoadLocal(info.num_max_params);
3291 check_pos += SmiRelationalOp(Token::kLTE);
3292 TargetEntryInstr* valid;
3293 TargetEntryInstr* too_many;
3294 check_pos += BranchIfTrue(&valid, &too_many);
3295 check_pos.current = valid;
3296
3297 Fragment(too_many) + Goto(info.throw_no_such_method);
3298
3299 check_pos += Goto(join_after_optional);
3300 }
3301
3302 Fragment check_named(has_named);
3303
3304 TargetEntryInstr* same;
3305 TargetEntryInstr* different;
3306 check_named += LoadLocal(info.num_fixed_params);
3307 check_named += IntConstant(info.descriptor.PositionalCount());
3308 check_named += BranchIfEqual(&same, &different);
3309 check_named.current = same;
3310
3311 Fragment(different) + Goto(info.throw_no_such_method);
3312
3313 if (info.descriptor.NamedCount() > 0) {
3314 check_named += IntConstant(info.descriptor.NamedCount());
3315 check_named += LoadLocal(info.num_opt_params);
3316 check_named += SmiRelationalOp(Token::kLTE);
3317 TargetEntryInstr* valid;
3318 TargetEntryInstr* too_many;
3319 check_named += BranchIfTrue(&valid, &too_many);
3320 check_named.current = valid;
3321
3322 Fragment(too_many) + Goto(info.throw_no_such_method);
3323 }
3324
3325 // Check the names for optional arguments. If applicable, also check that all
3326 // required named parameters are provided.
3327 check_named += BuildClosureCallNamedArgumentsCheck(info);
3328 check_named += Goto(join_after_optional);
3329
3330 check_entry.current = join_after_optional;
3331 return check_entry;
3332}
3333
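// Editorial note: bounds-checks the function type arguments in use against
// the signature's type parameter bounds, skipping parameters marked covariant
// in the flags and treating a null bounds vector as all-dynamic.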
3334Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
3335 const ClosureCallInfo& info) {
3336 JoinEntryInstr* done = BuildJoinEntry();
3337 JoinEntryInstr* loop = BuildJoinEntry();
3338
3339 // We assume that the value stored in :t_type_parameters is not null (i.e.,
3340 // the function stored in :t_function is generic).
3341 Fragment loop_init;
3342
3343 // A null bounds vector means all bounds are dynamic, so no check is needed.
3344 loop_init += LoadLocal(info.type_parameters);
3345 loop_init += LoadNativeField(Slot::TypeParameters_bounds());
3346 TargetEntryInstr* null_bounds;
3347 TargetEntryInstr* non_null_bounds;
3348 loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
3349
3350 Fragment(null_bounds) + Goto(done);
3351
3352 loop_init.current = non_null_bounds;
3353 // Loop over the type parameters array.
3354 loop_init += IntConstant(0);
3355 loop_init += StoreLocal(info.vars->current_param_index);
3356 loop_init += Drop();
3357 loop_init += Goto(loop);
3358
3359 Fragment loop_check(loop);
3360 loop_check += LoadLocal(info.vars->current_param_index);
3361 loop_check += LoadLocal(info.num_type_parameters);
3362 loop_check += SmiRelationalOp(Token::kLT);
3363 TargetEntryInstr* more;
3364 TargetEntryInstr* no_more;
3365 loop_check += BranchIfTrue(&more, &no_more);
3366
3367 Fragment(no_more) + Goto(done);
3368
3369 Fragment loop_test_flag(more);
3370 JoinEntryInstr* next = BuildJoinEntry();
3371 JoinEntryInstr* check = BuildJoinEntry();
3372 loop_test_flag += LoadLocal(info.type_parameter_flags);
3373 TargetEntryInstr* null_flags;
3374 TargetEntryInstr* non_null_flags;
3375 loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
3376
3377 Fragment(null_flags) + Goto(check); // Check type if null (non-covariant).
3378
3379 loop_test_flag.current = non_null_flags; // Test flags if not null.
3380 loop_test_flag += LoadLocal(info.type_parameter_flags);
3381 loop_test_flag += LoadLocal(info.vars->current_param_index);
3382 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
3383 loop_test_flag += SmiBinaryOp(Token::kSHR);
3384 loop_test_flag += LoadIndexed(
3385 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3386 loop_test_flag += LoadLocal(info.vars->current_param_index);
3387 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
3388 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3389 loop_test_flag += SmiBinaryOp(Token::kSHR);
3390 loop_test_flag += IntConstant(1);
3391 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3392 loop_test_flag += IntConstant(0);
3393 TargetEntryInstr* is_noncovariant;
3394 TargetEntryInstr* is_covariant;
3395 loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
3396
3397 Fragment(is_covariant) + Goto(next); // Continue if covariant.
3398 Fragment(is_noncovariant) + Goto(check); // Check type if non-covariant.
3399
3400 Fragment loop_prep_type_param(check);
3401 JoinEntryInstr* dynamic_type_param = BuildJoinEntry();
3402 JoinEntryInstr* call = BuildJoinEntry();
3403
3404 // Load type argument already stored in function_type_args if non null.
3405 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3406 TargetEntryInstr* null_ftav;
3407 TargetEntryInstr* non_null_ftav;
3408 loop_prep_type_param += BranchIfNull(&null_ftav, &non_null_ftav);
3409
3410 Fragment(null_ftav) + Goto(dynamic_type_param);
3411
3412 loop_prep_type_param.current = non_null_ftav;
3413 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3414 loop_prep_type_param += LoadLocal(info.vars->current_param_index);
3415 loop_prep_type_param += LoadLocal(info.num_parent_type_args);
3416 loop_prep_type_param += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3417 loop_prep_type_param += LoadIndexed(
3418 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3419 loop_prep_type_param += StoreLocal(info.vars->current_type_param);
3420 loop_prep_type_param += Drop();
3421 loop_prep_type_param += Goto(call);
3422
3423 Fragment loop_dynamic_type_param(dynamic_type_param);
3424 // If function_type_args is null, the instantiated type param is dynamic.
3425 loop_dynamic_type_param += Constant(Type::ZoneHandle(Type::DynamicType()));
3426 loop_dynamic_type_param += StoreLocal(info.vars->current_type_param);
3427 loop_dynamic_type_param += Drop();
3428 loop_dynamic_type_param += Goto(call);
3429
3430 Fragment loop_call_check(call);
3431 // Load instantiators.
3432 loop_call_check += LoadLocal(info.instantiator_type_args);
3433 loop_call_check += LoadLocal(info.vars->function_type_args);
3434 // Load instantiated type parameter.
3435 loop_call_check += LoadLocal(info.vars->current_type_param);
3436 // Load bound from type parameters.
3437 loop_call_check += LoadLocal(info.type_parameters);
3438 loop_call_check += LoadNativeField(Slot::TypeParameters_bounds());
3439 loop_call_check += LoadLocal(info.vars->current_param_index);
3440 loop_call_check += LoadIndexed(
3441 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3442 // Load (canonicalized) name of type parameter in signature.
3443 loop_call_check += LoadLocal(info.type_parameters);
3444 loop_call_check += LoadNativeField(Slot::TypeParameters_names());
3445 loop_call_check += LoadLocal(info.vars->current_param_index);
3446 loop_call_check += LoadIndexed(
3447 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3448 // Assert that the passed-in type argument is consistent with the bound of
3449 // the corresponding type parameter.
3450 loop_call_check += AssertSubtype(TokenPosition::kNoSource);
3451 loop_call_check += Goto(next);
3452
3453 Fragment loop_incr(next);
3454 loop_incr += LoadLocal(info.vars->current_param_index);
3455 loop_incr += IntConstant(1);
3456 loop_incr += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3457 loop_incr += StoreLocal(info.vars->current_param_index);
3458 loop_incr += Drop();
3459 loop_incr += Goto(loop);
3460
3461 return Fragment(loop_init.entry, done);
3462}
3463
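// Editorial note: type-checks a single argument of a dynamic closure call
// against the parameter type stored at param_index in the signature's
// parameter types array, reporting any failure under arg_name.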
3464Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
3465 const ClosureCallInfo& info,
3466 LocalVariable* param_index,
3467 intptr_t arg_index,
3468 const String& arg_name) {
3469 Fragment instructions;
3470
3471 // Load value.
3472 instructions += LoadLocal(parsed_function_->ParameterVariable(arg_index));
3473 // Load destination type.
3474 instructions += LoadLocal(info.parameter_types);
3475 instructions += LoadLocal(param_index);
3476 instructions += LoadIndexed(
3477 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3478 // Load instantiator type arguments.
3479 instructions += LoadLocal(info.instantiator_type_args);
3480 // Load the full set of function type arguments.
3481 instructions += LoadLocal(info.vars->function_type_args);
3482 // Check that the value has the right type.
3483 instructions += AssertAssignable(TokenPosition::kNoSource, arg_name,
3484 AssertAssignableInstr::kParameterCheck);
3485 // Make sure to store the result to keep data dependencies accurate.
3486 instructions += StoreLocal(parsed_function_->ParameterVariable(arg_index));
3487 instructions += Drop();
3488
3489 return instructions;
3490}
3491
3492Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
3493 const ClosureCallInfo& info) {
3494 Fragment instructions;
3495
3496 // Only check explicit arguments (i.e., skip the receiver), as the receiver
3497 // is always assignable to its type (stored as dynamic).
3498 for (intptr_t i = 1; i < info.descriptor.PositionalCount(); i++) {
3499 instructions += IntConstant(i);
3500 LocalVariable* param_index = MakeTemporary("param_index");
3501 // We don't have a compile-time name, so this symbol signals the runtime
3502 // that it should recreate the type check using info from the stack.
3503 instructions += BuildClosureCallArgumentTypeCheck(
3504 info, param_index, i, Symbols::dynamic_assert_assignable_stc_check());
3505 instructions += DropTemporary(&param_index);
3506 }
3507
3508 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3509 const intptr_t arg_index = info.descriptor.PositionAt(i);
3510 auto const param_index = info.vars->named_argument_parameter_indices.At(i);
3511 // We have a compile-time name available, but we still want the runtime to
3512 // detect that the generated AssertAssignable instruction is dynamic.
3513 instructions += BuildClosureCallArgumentTypeCheck(
3514 info, param_index, arg_index,
3515 Symbols::dynamic_assert_assignable_stc_check());
3516 }
3517
3518 return instructions;
3519}
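
The two loops above visit argument slots in a fixed order: explicit positional arguments first (skipping the receiver at slot 0), then named arguments at the slots recorded in the arguments descriptor. A minimal standalone model of that iteration (DescriptorModel and the call shape are invented for illustration; this is not VM code):

#include <cstdio>
#include <vector>

// Stand-in for the VM's ArgumentsDescriptor: positional_count includes the
// receiver at slot 0; each named argument records the slot it occupies after
// the positional arguments.
struct DescriptorModel {
  int positional_count;
  std::vector<int> named_positions;
};

int main() {
  // Call shape: f(receiver, a, b, x: ..., y: ...) -> x at slot 3, y at slot 4.
  DescriptorModel desc{3, {3, 4}};
  for (int i = 1; i < desc.positional_count; i++) {  // skip the receiver
    std::printf("check positional argument at slot %d\n", i);
  }
  for (int pos : desc.named_positions) {
    std::printf("check named argument at slot %d\n", pos);
  }
  return 0;
}
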
3520
3521Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
3522 LocalVariable* closure) {
3523 ClosureCallInfo info(closure, BuildThrowNoSuchMethod(),
3524 saved_args_desc_array(),
3525 parsed_function_->dynamic_closure_call_vars());
3526
3527 Fragment body;
3528 body += LoadLocal(info.closure);
3529 body += LoadNativeField(Slot::Closure_function());
3530 body += LoadNativeField(Slot::Function_signature());
3531 info.signature = MakeTemporary("signature");
3532
3533 body += LoadLocal(info.signature);
3534 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3535 FunctionType::PackedNumFixedParameters>(
3536 Slot::FunctionType_packed_parameter_counts());
3537 info.num_fixed_params = MakeTemporary("num_fixed_params");
3538
3539 body += LoadLocal(info.signature);
3540 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3541 FunctionType::PackedNumOptionalParameters>(
3542 Slot::FunctionType_packed_parameter_counts());
3543 info.num_opt_params = MakeTemporary("num_opt_params");
3544
3545 body += LoadLocal(info.num_fixed_params);
3546 body += LoadLocal(info.num_opt_params);
3547 body += SmiBinaryOp(Token::kADD);
3548 info.num_max_params = MakeTemporary("num_max_params");
3549
3550 body += LoadLocal(info.signature);
3551 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3552 FunctionType::PackedHasNamedOptionalParameters>(
3553 Slot::FunctionType_packed_parameter_counts());
3554
3555 body += IntConstant(0);
3556 body += StrictCompare(Token::kNE_STRICT);
3557 info.has_named_params = MakeTemporary("has_named_params");
3558
3559 body += LoadLocal(info.signature);
3560 body += LoadNativeField(Slot::FunctionType_named_parameter_names());
3561 info.named_parameter_names = MakeTemporary("named_parameter_names");
3562
3563 body += LoadLocal(info.signature);
3564 body += LoadNativeField(Slot::FunctionType_parameter_types());
3565 info.parameter_types = MakeTemporary("parameter_types");
3566
3567 body += LoadLocal(info.signature);
3568 body += LoadNativeField(Slot::FunctionType_type_parameters());
3569 info.type_parameters = MakeTemporary("type_parameters");
3570
3571 body += LoadLocal(info.closure);
3572 body += LoadNativeField(Slot::Closure_instantiator_type_arguments());
3573 info.instantiator_type_args = MakeTemporary("instantiator_type_args");
3574
3575 body += LoadLocal(info.closure);
3576 body += LoadNativeField(Slot::Closure_function_type_arguments());
3577 info.parent_function_type_args = MakeTemporary("parent_function_type_args");
3578
3579 // At this point, all the read-only temporaries stored in the ClosureCallInfo
3580 // should be either loaded or still nullptr, if not needed for this function.
3581 // Now we check that the arguments to the closure call have the right shape.
3582 body += BuildClosureCallArgumentsValidCheck(info);
3583
3584 // If the closure function is not generic, there are no local function type
3585 // args. Thus, use whatever was stored for the parent function type arguments,
3586 // which has already been checked against any parent type parameter bounds.
3587 Fragment not_generic;
3588 not_generic += LoadLocal(info.parent_function_type_args);
3589 not_generic += StoreLocal(info.vars->function_type_args);
3590 not_generic += Drop();
3591
3592 // If the closure function is generic, then we first need to calculate the
3593 // full set of function type arguments, then check the local function type
3594 // arguments against the closure function's type parameter bounds.
3595 Fragment generic;
3596 // Calculate the number of parent type arguments and store them in
3597 // info.num_parent_type_args.
3598 generic += LoadLocal(info.signature);
3599 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3600 UntaggedFunctionType::PackedNumParentTypeArguments>(
3601 Slot::FunctionType_packed_type_parameter_counts());
3602 info.num_parent_type_args = MakeTemporary("num_parent_type_args");
3603
3604 // Hoist number of type parameters.
3605 generic += LoadLocal(info.signature);
3606 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3607 UntaggedFunctionType::PackedNumTypeParameters>(
3608 Slot::FunctionType_packed_type_parameter_counts());
3609 info.num_type_parameters = MakeTemporary("num_type_parameters");
3610
3611 // Hoist type parameter flags.
3612 generic += LoadLocal(info.type_parameters);
3613 generic += LoadNativeField(Slot::TypeParameters_flags());
3614 info.type_parameter_flags = MakeTemporary("type_parameter_flags");
3615
3616 // Calculate the local function type arguments and store them in
3617 // info.vars->function_type_args.
3618 generic += BuildClosureCallDefaultTypeHandling(info);
3619
3620 // Load the local function type args.
3621 generic += LoadLocal(info.vars->function_type_args);
3622 // Load the parent function type args.
3623 generic += LoadLocal(info.parent_function_type_args);
3624 // Load the number of parent type parameters.
3625 generic += LoadLocal(info.num_parent_type_args);
3626 // Load the number of total type parameters.
3627 generic += LoadLocal(info.num_parent_type_args);
3628 generic += LoadLocal(info.num_type_parameters);
3629 generic += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3630
3631 // Call the static function for prepending type arguments.
3632 generic += StaticCall(TokenPosition::kNoSource,
3633 PrependTypeArgumentsFunction(), 4, ICData::kStatic);
3634 generic += StoreLocal(info.vars->function_type_args);
3635 generic += Drop();
3636
3637 // Now that we have the full set of function type arguments, check them
3638 // against the type parameter bounds. However, if the local function type
3639 // arguments are delayed type arguments, they have already been checked by
3640 // the type system and need not be checked again at the call site.
3641 auto const check_bounds = BuildClosureCallTypeArgumentsTypeCheck(info);
3642 if (FLAG_eliminate_type_checks) {
3643 generic += TestDelayedTypeArgs(info.closure, /*present=*/{},
3644 /*absent=*/check_bounds);
3645 } else {
3646 generic += check_bounds;
3647 }
3648 generic += DropTemporary(&info.type_parameter_flags);
3649 generic += DropTemporary(&info.num_type_parameters);
3650 generic += DropTemporary(&info.num_parent_type_args);
3651
3652 // Call the appropriate fragment for setting up the function type arguments
3653 // and performing any needed type argument checking.
3654 body += TestClosureFunctionGeneric(info, generic, not_generic);
3655
3656 // Check that the values provided as arguments are assignable to the types
3657 // of the corresponding closure function parameters.
3658 body += BuildClosureCallArgumentTypeChecks(info);
3659
3660 // Drop all the read-only temporaries at the end of the fragment.
3661 body += DropTemporary(&info.parent_function_type_args);
3662 body += DropTemporary(&info.instantiator_type_args);
3663 body += DropTemporary(&info.type_parameters);
3664 body += DropTemporary(&info.parameter_types);
3665 body += DropTemporary(&info.named_parameter_names);
3666 body += DropTemporary(&info.has_named_params);
3667 body += DropTemporary(&info.num_max_params);
3668 body += DropTemporary(&info.num_opt_params);
3669 body += DropTemporary(&info.num_fixed_params);
3670 body += DropTemporary(&info.signature);
3671
3672 return body;
3673}
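
The generic path above delegates to PrependTypeArgumentsFunction() to build the full function type argument vector from the parent vector and the closure's own (local) vector. A rough standalone sketch of the intended effect, with invented names and the simplifying assumption that the local vector holds only the closure's own type arguments:

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical model: the combined vector is `total` entries long, with the
// first `parent_count` entries taken from the parent vector and the rest
// taken from the local vector.
std::vector<std::string> PrependModel(const std::vector<std::string>& parent,
                                      const std::vector<std::string>& local,
                                      int parent_count, int total) {
  std::vector<std::string> result;
  for (int i = 0; i < parent_count; i++) result.push_back(parent[i]);
  for (int i = parent_count; i < total; i++)
    result.push_back(local[i - parent_count]);
  return result;
}

int main() {
  for (const auto& t : PrependModel({"T", "U"}, {"V"}, 2, 3)) {
    std::printf("%s ", t.c_str());  // prints: T U V
  }
  std::printf("\n");
  return 0;
}
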
3674
3675FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
3676 const Function& function) {
3677 const ArgumentsDescriptor descriptor(saved_args_desc_array());
3678 // Find the name of the field we should dispatch to.
3679 const Class& owner = Class::Handle(Z, function.Owner());
3680 ASSERT(!owner.IsNull());
3681 auto& field_name = String::Handle(Z, function.name());
3682 // If the field name has a dyn: tag, then remove it. We don't add dynamic
3683  // invocation forwarders for field getters used for invoking; we just use
3684 // the tag in the name of the invoke field dispatcher to detect dynamic calls.
3685 const bool is_dynamic_call =
3686 Function::IsDynamicInvocationForwarderName(field_name);
3687 if (is_dynamic_call) {
3688 field_name = Function::DemangleDynamicInvocationForwarderName(field_name);
3689 }
3690 const String& getter_name = String::ZoneHandle(
3691 Z, Symbols::New(thread_,
3692 String::Handle(Z, Field::GetterSymbol(field_name))));
3693
3694 // Determine if this is `class Closure { get call => this; }`
3695 const Class& closure_class =
3696 Class::Handle(Z, IG->object_store()->closure_class());
3697 const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
3698 field_name.Equals(Symbols::call());
3699
3700 graph_entry_ =
3701 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3702
3703 auto normal_entry = BuildFunctionEntry(graph_entry_);
3704 graph_entry_->set_normal_entry(normal_entry);
3705
3706 PrologueInfo prologue_info(-1, -1);
3707 BlockEntryInstr* instruction_cursor =
3708 BuildPrologue(normal_entry, &prologue_info);
3709
3710 Fragment body(instruction_cursor);
3711 body += CheckStackOverflowInPrologue(function.token_pos());
3712
3713 // Build any dynamic closure call checks before pushing arguments to the
3714 // final call on the stack to make debugging easier.
3715 LocalVariable* closure = nullptr;
3716 if (is_closure_call) {
3717 closure = parsed_function_->ParameterVariable(0);
3718 if (is_dynamic_call) {
3719 // The whole reason for making this invoke field dispatcher is that
3720 // this closure call needs checking, so we shouldn't inline a call to an
3721 // unchecked entry that can't tail call NSM.
3722 InlineBailout(
3723 "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
3724
3725 body += BuildDynamicClosureCallChecks(closure);
3726 }
3727 }
3728
3729 if (descriptor.TypeArgsLen() > 0) {
3730 LocalVariable* type_args = parsed_function_->function_type_arguments();
3731 ASSERT(type_args != nullptr);
3732 body += LoadLocal(type_args);
3733 }
3734
3735 if (is_closure_call) {
3736 // The closure itself is the first argument.
3737 body += LoadLocal(closure);
3738 } else {
3739 // Invoke the getter to get the field value.
3740 body += LoadLocal(parsed_function_->ParameterVariable(0));
3741 const intptr_t kTypeArgsLen = 0;
3742 const intptr_t kNumArgsChecked = 1;
3743 body += InstanceCall(TokenPosition::kMinSource, getter_name, Token::kGET,
3744 kTypeArgsLen, 1, Array::null_array(), kNumArgsChecked);
3745 }
3746
3747 // Push all arguments onto the stack.
3748 for (intptr_t pos = 1; pos < descriptor.Count(); pos++) {
3749 body += LoadLocal(parsed_function_->ParameterVariable(pos));
3750 }
3751
3752 // Construct argument names array if necessary.
3753 const Array* argument_names = &Object::null_array();
3754 if (descriptor.NamedCount() > 0) {
3755 const auto& array_handle =
3756 Array::ZoneHandle(Z, Array::New(descriptor.NamedCount(), Heap::kNew));
3757 String& string_handle = String::Handle(Z);
3758 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
3759 const intptr_t named_arg_index =
3760 descriptor.PositionAt(i) - descriptor.PositionalCount();
3761 string_handle = descriptor.NameAt(i);
3762 array_handle.SetAt(named_arg_index, string_handle);
3763 }
3764 argument_names = &array_handle;
3765 }
3766
3767 if (is_closure_call) {
3768 body += LoadLocal(closure);
3769 if (!FLAG_precompiled_mode) {
3770 // Lookup the function in the closure.
3771 body += LoadNativeField(Slot::Closure_function());
3772 }
3773 body += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
3774 descriptor.TypeArgsLen(), descriptor.Count(),
3775 *argument_names);
3776 } else {
3777 const intptr_t kNumArgsChecked = 1;
3778 body +=
3779 InstanceCall(TokenPosition::kMinSource,
3780 is_dynamic_call ? Symbols::DynamicCall() : Symbols::call(),
3781 Token::kILLEGAL, descriptor.TypeArgsLen(),
3782 descriptor.Count(), *argument_names, kNumArgsChecked);
3783 }
3784
3785 body += Return(TokenPosition::kNoSource);
3786
3787 return new (Z)
3788 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3789 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
3790}
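
The named-arguments block above places each name at PositionAt(i) - PositionalCount(), so the resulting array is ordered by actual argument slot rather than by descriptor order. A small self-contained model (the call shape and slot numbers are invented):

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Hypothetical call g(a, b, x: 1, y: 2): 2 positional arguments, with named
  // arguments y at slot 3 and x at slot 2 (descriptor order need not match
  // slot order).
  const int positional_count = 2;
  const std::vector<std::pair<std::string, int>> named = {{"y", 3}, {"x", 2}};

  std::vector<std::string> argument_names(named.size());
  for (const auto& entry : named) {
    argument_names[entry.second - positional_count] = entry.first;
  }
  for (const auto& name : argument_names) {
    std::printf("%s\n", name.c_str());  // prints: x, then y
  }
  return 0;
}
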
3791
3792FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
3793 const Function& function,
3794 bool is_implicit_closure_function,
3795 bool throw_no_such_method_error) {
3796 graph_entry_ =
3797 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3798
3799 auto normal_entry = BuildFunctionEntry(graph_entry_);
3800 graph_entry_->set_normal_entry(normal_entry);
3801
3802 PrologueInfo prologue_info(-1, -1);
3803 BlockEntryInstr* instruction_cursor =
3804 BuildPrologue(normal_entry, &prologue_info);
3805
3806 Fragment body(instruction_cursor);
3807 body += CheckStackOverflowInPrologue(function.token_pos());
3808
3809 // If we are inside the tearoff wrapper function (implicit closure), we need
3810 // to extract the receiver from the context. We just replace it directly on
3811 // the stack to simplify the rest of the code.
3812 if (is_implicit_closure_function && !function.is_static()) {
3813 if (parsed_function_->has_arg_desc_var()) {
3814 body += LoadArgDescriptor();
3815 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3816 } else {
3817 ASSERT(function.NumOptionalParameters() == 0);
3818 body += IntConstant(function.NumParameters());
3819 }
3820 body += LoadLocal(parsed_function_->current_context_var());
3821 body += StoreFpRelativeSlot(
3822 kWordSize * compiler::target::frame_layout.param_end_from_fp);
3823 }
3824
3825 if (function.NeedsTypeArgumentTypeChecks()) {
3826 BuildTypeArgumentTypeChecks(TypeChecksToBuild::kCheckAllTypeParameterBounds,
3827 &body);
3828 }
3829
3830 if (function.NeedsArgumentTypeChecks()) {
3831 BuildArgumentTypeChecks(&body, &body, nullptr);
3832 }
3833
3834 body += MakeTemp();
3835 LocalVariable* result = MakeTemporary();
3836
3837 // Do "++argument_count" if any type arguments were passed.
3838 LocalVariable* argument_count_var = parsed_function_->expression_temp_var();
3839 body += IntConstant(0);
3840 body += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3841 body += Drop();
3842 if (function.IsGeneric()) {
3843 Fragment then;
3844 Fragment otherwise;
3845 otherwise += IntConstant(1);
3846 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3847 otherwise += Drop();
3848 body += TestAnyTypeArgs(then, otherwise);
3849 }
3850
3851 if (function.HasOptionalParameters()) {
3852 body += LoadArgDescriptor();
3853 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3854 } else {
3855 body += IntConstant(function.NumParameters());
3856 }
3857 body += LoadLocal(argument_count_var);
3858  body += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3859 LocalVariable* argument_count = MakeTemporary();
3860
3861 // We are generating code like the following:
3862 //
3863 // var arguments = new Array<dynamic>(argument_count);
3864 //
3865 // int i = 0;
3866 // if (any type arguments are passed) {
3867 // arguments[0] = function_type_arguments;
3868 // ++i;
3869 // }
3870 //
3871 // for (; i < argument_count; ++i) {
3872 // arguments[i] = LoadFpRelativeSlot(
3873 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3874 // }
3875 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
3876 body += LoadLocal(argument_count);
3877 body += CreateArray();
3878 LocalVariable* arguments = MakeTemporary();
3879
3880 {
3881 // int i = 0
3882 LocalVariable* index = parsed_function_->expression_temp_var();
3883 body += IntConstant(0);
3884 body += StoreLocal(TokenPosition::kNoSource, index);
3885 body += Drop();
3886
3887 // if (any type arguments are passed) {
3888 // arguments[0] = function_type_arguments;
3889 // i = 1;
3890 // }
3891 if (function.IsGeneric()) {
3892 Fragment store;
3893 store += LoadLocal(arguments);
3894 store += IntConstant(0);
3895 store += LoadFunctionTypeArguments();
3896 store += StoreIndexed(kArrayCid);
3897 store += IntConstant(1);
3898 store += StoreLocal(TokenPosition::kNoSource, index);
3899 store += Drop();
3900 body += TestAnyTypeArgs(store, Fragment());
3901 }
3902
3903 TargetEntryInstr* body_entry;
3904 TargetEntryInstr* loop_exit;
3905
3906 Fragment condition;
3907 // i < argument_count
3908 condition += LoadLocal(index);
3909 condition += LoadLocal(argument_count);
3910 condition += SmiRelationalOp(Token::kLT);
3911 condition += BranchIfTrue(&body_entry, &loop_exit, /*negate=*/false);
3912
3913 Fragment loop_body(body_entry);
3914
3915 // arguments[i] = LoadFpRelativeSlot(
3916 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3917 loop_body += LoadLocal(arguments);
3918 loop_body += LoadLocal(index);
3919 loop_body += LoadLocal(argument_count);
3920 loop_body += LoadLocal(index);
3921    loop_body += SmiBinaryOp(Token::kSUB, /*is_truncating=*/true);
3922 loop_body +=
3923 LoadFpRelativeSlot(compiler::target::kWordSize *
3924 compiler::target::frame_layout.param_end_from_fp,
3925 CompileType::Dynamic());
3926 loop_body += StoreIndexed(kArrayCid);
3927
3928 // ++i
3929 loop_body += LoadLocal(index);
3930 loop_body += IntConstant(1);
3931    loop_body += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3932 loop_body += StoreLocal(TokenPosition::kNoSource, index);
3933 loop_body += Drop();
3934
3935 JoinEntryInstr* join = BuildJoinEntry();
3936 loop_body += Goto(join);
3937
3938 Fragment loop(join);
3939 loop += condition;
3940
3941 Instruction* entry =
3942 new (Z) GotoInstr(join, CompilerState::Current().GetNextDeoptId());
3943 body += Fragment(entry, loop_exit);
3944 }
3945
3946 // Load receiver.
3947 if (is_implicit_closure_function) {
3948 if (throw_no_such_method_error) {
3949 const Function& parent =
3950 Function::ZoneHandle(Z, function.parent_function());
3951 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3952 AbstractType& type = AbstractType::ZoneHandle(Z);
3953 type = Type::New(owner, Object::null_type_arguments());
3954 type = ClassFinalizer::FinalizeType(type);
3955 body += Constant(type);
3956 } else {
3957 body += LoadLocal(parsed_function_->current_context_var());
3958 }
3959 } else {
3960 body += LoadLocal(parsed_function_->ParameterVariable(0));
3961 }
3962
3963 body += Constant(String::ZoneHandle(Z, function.name()));
3964
3965 if (!parsed_function_->has_arg_desc_var()) {
3966 // If there is no variable for the arguments descriptor (this function's
3967 // signature doesn't require it), then we need to create one.
3968 Array& args_desc = Array::ZoneHandle(
3969 Z, ArgumentsDescriptor::NewBoxed(0, function.NumParameters()));
3970 body += Constant(args_desc);
3971 } else {
3972 body += LoadArgDescriptor();
3973 }
3974
3975 body += LoadLocal(arguments);
3976
3977 if (throw_no_such_method_error) {
3978 const Function& parent =
3979 Function::ZoneHandle(Z, function.parent_function());
3980 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3981 InvocationMirror::Level im_level = owner.IsTopLevel()
3982 ? InvocationMirror::kTopLevel
3983 : InvocationMirror::kStatic;
3984 InvocationMirror::Kind im_kind;
3985 if (function.IsImplicitGetterFunction() || function.IsGetterFunction()) {
3986 im_kind = InvocationMirror::kGetter;
3987 } else if (function.IsImplicitSetterFunction() ||
3988 function.IsSetterFunction()) {
3989 im_kind = InvocationMirror::kSetter;
3990 } else {
3991 im_kind = InvocationMirror::kMethod;
3992 }
3993 body += IntConstant(InvocationMirror::EncodeType(im_level, im_kind));
3994 } else {
3995 body += NullConstant();
3996 }
3997
3998 // Push the number of delayed type arguments.
3999 if (function.IsClosureFunction()) {
4000 LocalVariable* closure = parsed_function_->ParameterVariable(0);
4001 Fragment then;
4002 then += IntConstant(function.NumTypeParameters());
4003 then += StoreLocal(TokenPosition::kNoSource, argument_count_var);
4004 then += Drop();
4005 Fragment otherwise;
4006 otherwise += IntConstant(0);
4007 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
4008 otherwise += Drop();
4009 body += TestDelayedTypeArgs(closure, then, otherwise);
4010 body += LoadLocal(argument_count_var);
4011 } else {
4012 body += IntConstant(0);
4013 }
4014
4015 const Class& mirror_class =
4016 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
4017 ASSERT(!mirror_class.IsNull());
4018 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
4019 ASSERT(error == Error::null());
4020 const Function& allocation_function = Function::ZoneHandle(
4021 Z, mirror_class.LookupStaticFunction(Library::PrivateCoreLibName(
4022 Symbols::AllocateInvocationMirrorForClosure())));
4023 ASSERT(!allocation_function.IsNull());
4024 body += StaticCall(TokenPosition::kMinSource, allocation_function,
4025 /* argument_count = */ 5, ICData::kStatic);
4026
4027 if (throw_no_such_method_error) {
4028 const Class& klass = Class::ZoneHandle(
4029 Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
4030 ASSERT(!klass.IsNull());
4031 const auto& error = klass.EnsureIsFinalized(H.thread());
4032 ASSERT(error == Error::null());
4033 const Function& throw_function = Function::ZoneHandle(
4034 Z,
4035 klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNewInvocation()));
4036 ASSERT(!throw_function.IsNull());
4037 body += StaticCall(TokenPosition::kNoSource, throw_function, 2,
4038 ICData::kStatic);
4039 } else {
4040 body += InstanceCall(
4041 TokenPosition::kNoSource, Symbols::NoSuchMethod(), Token::kILLEGAL,
4042 /*type_args_len=*/0, /*argument_count=*/2, Array::null_array(),
4043 /*checked_argument_count=*/1);
4044 }
4045 body += StoreLocal(TokenPosition::kNoSource, result);
4046 body += Drop();
4047
4048 body += Drop(); // arguments
4049 body += Drop(); // argument count
4050
4051 AbstractType& return_type = AbstractType::Handle(function.result_type());
4052 if (!return_type.IsTopTypeForSubtyping()) {
4053 body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
4054 return_type, Symbols::Empty());
4055 }
4056 body += Return(TokenPosition::kNoSource);
4057
4058 return new (Z)
4059 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4060 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4061}
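
The loop sketched in the comment block inside this function reads each argument out of the caller's frame at param_end_from_fp + argument_count - i. A standalone model of that index arithmetic (the frame layout and values are invented for illustration):

#include <cstdio>
#include <vector>

int main() {
  const int param_end = 2;       // stands in for frame_layout.param_end_from_fp
  const int argument_count = 3;  // receiver + 2 arguments
  // Arguments are stored deepest-first, so slot param_end + argument_count
  // holds argument 0.
  std::vector<int> frame = {0, 0, 0, 30, 20, 10};

  std::vector<int> arguments(argument_count);
  for (int i = 0; i < argument_count; i++) {
    arguments[i] = frame[param_end + argument_count - i];
  }
  for (int value : arguments) {
    std::printf("%d\n", value);  // prints: 10, 20, 30
  }
  return 0;
}
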
4062
4063Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
4064 Fragment keep_same, use_defaults;
4065
4066 if (!function.IsGeneric()) return keep_same;
4067
4068 const auto& default_types =
4069 TypeArguments::ZoneHandle(Z, function.DefaultTypeArguments(Z));
4070
4071 if (default_types.IsNull()) return keep_same;
4072
4073 if (function.IsClosureFunction()) {
4074 // Note that we can't use TranslateInstantiatedTypeArguments here as
4075 // that uses LoadInstantiatorTypeArguments() and LoadFunctionTypeArguments()
4076 // for the instantiator and function type argument vectors, but here we
4077 // load the instantiator and parent function type argument vectors from
4078 // the closure object instead.
4079 LocalVariable* const closure = parsed_function_->ParameterVariable(0);
4080 auto const mode = function.default_type_arguments_instantiation_mode();
4081
4082 switch (mode) {
4083 case InstantiationMode::kIsInstantiated:
4084 use_defaults += Constant(default_types);
4085 break;
4086 case InstantiationMode::kSharesInstantiatorTypeArguments:
4087 use_defaults += LoadLocal(closure);
4088 use_defaults +=
4089 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4090 break;
4091 case InstantiationMode::kSharesFunctionTypeArguments:
4092 use_defaults += LoadLocal(closure);
4093 use_defaults +=
4094 LoadNativeField(Slot::Closure_function_type_arguments());
4095 break;
4096 case InstantiationMode::kNeedsInstantiation:
4097 // Only load the instantiator or function type arguments from the
4098 // closure if they're needed for instantiation.
4099 if (!default_types.IsInstantiated(kCurrentClass)) {
4100 use_defaults += LoadLocal(closure);
4101 use_defaults +=
4102 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4103 } else {
4104 use_defaults += NullConstant();
4105 }
4106 if (!default_types.IsInstantiated(kFunctions)) {
4107 use_defaults += LoadLocal(closure);
4108 use_defaults +=
4109 LoadNativeField(Slot::Closure_function_type_arguments());
4110 } else {
4111 use_defaults += NullConstant();
4112 }
4113 use_defaults += InstantiateTypeArguments(default_types);
4114 break;
4115 }
4116 } else {
4117 use_defaults += TranslateInstantiatedTypeArguments(default_types);
4118 }
4119 use_defaults += StoreLocal(parsed_function_->function_type_arguments());
4120 use_defaults += Drop();
4121
4122 return TestAnyTypeArgs(keep_same, use_defaults);
4123}
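
The InstantiationMode switch above decides where the default type arguments come from at run time. A compact paraphrase of the four cases (the enum and strings below are illustrative, not VM declarations):

#include <cstdio>

enum class Mode {
  kIsInstantiated,
  kSharesInstantiatorTypeArguments,
  kSharesFunctionTypeArguments,
  kNeedsInstantiation,
};

// Returns a description of which vector supplies the defaults in each mode.
const char* DefaultTypeArgsSource(Mode mode) {
  switch (mode) {
    case Mode::kIsInstantiated:
      return "a constant vector embedded in the code";
    case Mode::kSharesInstantiatorTypeArguments:
      return "the closure's instantiator type arguments";
    case Mode::kSharesFunctionTypeArguments:
      return "the closure's function type arguments";
    case Mode::kNeedsInstantiation:
      return "instantiated at run time from whichever vectors are needed";
  }
  return "";
}

int main() {
  std::printf("%s\n", DefaultTypeArgsSource(Mode::kNeedsInstantiation));
  return 0;
}
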
4124
4125FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
4126 Fragment shared_prologue_linked_in,
4127 Fragment skippable_checks,
4128 Fragment redefinitions_if_skipped,
4129 Fragment body) {
4130 ASSERT(shared_prologue_linked_in.entry == graph_entry_->normal_entry());
4131 ASSERT(parsed_function_->has_entry_points_temp_var());
4132 Instruction* prologue_start = shared_prologue_linked_in.entry->next();
4133
4134 auto* join_entry = BuildJoinEntry();
4135
4136 Fragment normal_entry(shared_prologue_linked_in.entry);
4137 normal_entry +=
4138 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4139 normal_entry += StoreLocal(TokenPosition::kNoSource,
4140 parsed_function_->entry_points_temp_var());
4141 normal_entry += Drop();
4142 normal_entry += Goto(join_entry);
4143
4144 auto* extra_target_entry = BuildFunctionEntry(graph_entry_);
4145 Fragment extra_entry(extra_target_entry);
4146 extra_entry += IntConstant(
4147 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4148 extra_entry += StoreLocal(TokenPosition::kNoSource,
4149 parsed_function_->entry_points_temp_var());
4150 extra_entry += Drop();
4151 extra_entry += Goto(join_entry);
4152
4153 if (prologue_start != nullptr) {
4154 join_entry->LinkTo(prologue_start);
4155 } else {
4156 // Prologue is empty.
4157 shared_prologue_linked_in.current = join_entry;
4158 }
4159
4160 TargetEntryInstr* do_checks;
4161 TargetEntryInstr* skip_checks;
4162 shared_prologue_linked_in +=
4163 LoadLocal(parsed_function_->entry_points_temp_var());
4164 shared_prologue_linked_in += BuildEntryPointsIntrospection();
4165 shared_prologue_linked_in +=
4166 LoadLocal(parsed_function_->entry_points_temp_var());
4167 shared_prologue_linked_in += IntConstant(
4168 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4169 shared_prologue_linked_in +=
4170 BranchIfEqual(&skip_checks, &do_checks, /*negate=*/false);
4171
4172 JoinEntryInstr* rest_entry = BuildJoinEntry();
4173
4174 Fragment(do_checks) + skippable_checks + Goto(rest_entry);
4175 Fragment(skip_checks) + redefinitions_if_skipped + Goto(rest_entry);
4176 Fragment(rest_entry) + body;
4177
4178 return extra_target_entry;
4179}
4180
4181FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
4182 BlockEntryInstr* normal_entry,
4183 Fragment normal_prologue,
4184 Fragment extra_prologue,
4185 Fragment shared_prologue,
4186 Fragment body) {
4187 auto* join_entry = BuildJoinEntry();
4188 auto* extra_entry = BuildFunctionEntry(graph_entry_);
4189
4190 Fragment normal(normal_entry);
4191 normal += IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4192 normal += BuildEntryPointsIntrospection();
4193 normal += normal_prologue;
4194 normal += Goto(join_entry);
4195
4196 Fragment extra(extra_entry);
4197 extra +=
4198 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kSeparate));
4199 extra += BuildEntryPointsIntrospection();
4200 extra += extra_prologue;
4201 extra += Goto(join_entry);
4202
4203 Fragment(join_entry) + shared_prologue + body;
4204 return extra_entry;
4205}
4206
4207FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
4208 const Function& function) {
4209 const Function& parent = Function::ZoneHandle(Z, function.parent_function());
4210 Function& target = Function::ZoneHandle(Z, function.ImplicitClosureTarget(Z));
4211
4212 if (target.IsNull() ||
4213 (parent.num_fixed_parameters() != target.num_fixed_parameters())) {
4214 return BuildGraphOfNoSuchMethodForwarder(function, true,
4215 parent.is_static());
4216 }
4217
4218 graph_entry_ =
4219 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4220
4221 auto normal_entry = BuildFunctionEntry(graph_entry_);
4222 graph_entry_->set_normal_entry(normal_entry);
4223
4224 PrologueInfo prologue_info(-1, -1);
4225 BlockEntryInstr* instruction_cursor =
4226 BuildPrologue(normal_entry, &prologue_info);
4227
4228 Fragment closure(instruction_cursor);
4229 closure += CheckStackOverflowInPrologue(function.token_pos());
4230 closure += BuildDefaultTypeHandling(function);
4231
4232 // For implicit closure functions, any non-covariant checks are either
4233 // performed by the type system or a dynamic invocation layer (dynamic closure
4234 // call dispatcher, mirror, etc.). Static targets never have covariant
4235 // arguments, and for non-static targets, they already perform the covariant
4236 // checks internally. Thus, no checks are needed and we just need to invoke
4237 // the target with the right receiver (unless static).
4238 //
4239 // TODO(dartbug.com/44195): Consider replacing the argument pushes + static
4240 // call with stack manipulation and a tail call instead.
4241
4242 intptr_t type_args_len = 0;
4243 if (function.IsGeneric()) {
4244 if (target.IsConstructor()) {
4245 const auto& result_type = AbstractType::Handle(Z, function.result_type());
4246 ASSERT(result_type.IsFinalized());
4247 // Instantiate a flattened type arguments vector which
4248 // includes type arguments corresponding to superclasses.
4249 // TranslateInstantiatedTypeArguments is smart enough to
4250      // avoid instantiation and reuse the passed function type arguments
4251 // if there are no extra type arguments in the flattened vector.
4252 const auto& instantiated_type_arguments = TypeArguments::ZoneHandle(
4253 Z, Type::Cast(result_type).GetInstanceTypeArguments(H.thread()));
4254 closure +=
4255 TranslateInstantiatedTypeArguments(instantiated_type_arguments);
4256 } else {
4257 type_args_len = function.NumTypeParameters();
4258 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4259 closure += LoadLocal(parsed_function_->function_type_arguments());
4260 }
4261 } else if (target.IsFactory()) {
4262 // Factories always take an extra implicit argument for
4263 // type arguments even if their classes don't have type parameters.
4264 closure += NullConstant();
4265 }
4266
4267 // Push receiver.
4268 if (target.IsGenerativeConstructor()) {
4269 const Class& cls = Class::ZoneHandle(Z, target.Owner());
4270 if (cls.NumTypeArguments() > 0) {
4271 if (!function.IsGeneric()) {
4272 closure += Constant(TypeArguments::ZoneHandle(
4273 Z, cls.GetDeclarationInstanceTypeArguments()));
4274 }
4275 closure += AllocateObject(function.token_pos(), cls, 1);
4276 } else {
4277 ASSERT(!function.IsGeneric());
4278 closure += AllocateObject(function.token_pos(), cls, 0);
4279 }
4280 LocalVariable* receiver = MakeTemporary();
4281 closure += LoadLocal(receiver);
4282 } else if (!target.is_static()) {
4283 // The closure context is the receiver.
4284 closure += LoadLocal(parsed_function_->ParameterVariable(0));
4285 closure += LoadNativeField(Slot::Closure_context());
4286 }
4287
4288 closure += PushExplicitParameters(function);
4289
4290 // Forward parameters to the target.
4291 intptr_t argument_count = function.NumParameters() -
4292 function.NumImplicitParameters() +
4293 target.NumImplicitParameters();
4294 ASSERT(argument_count == target.NumParameters());
4295
4296 Array& argument_names =
4297 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4298
4299 closure += StaticCall(function.token_pos(), target, argument_count,
4300 argument_names, ICData::kNoRebind,
4301 /* result_type = */ nullptr, type_args_len);
4302
4303 if (target.IsGenerativeConstructor()) {
4304 // Drop result of constructor invocation, leave receiver
4305 // instance on the stack.
4306 closure += Drop();
4307 }
4308
4309 // Return the result.
4310 closure += Return(function.end_token_pos());
4311
4312 return new (Z)
4313 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4314 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4315}
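
The argument-count computation above swaps the closure's implicit parameters for the target's. A worked instance with invented numbers (an instance-method target with one implicit receiver):

#include <cassert>

int main() {
  const int function_num_params = 3;           // closure object + 2 explicit
  const int function_num_implicit_params = 1;  // the closure object itself
  const int target_num_implicit_params = 1;    // the receiver
  const int argument_count = function_num_params -
                             function_num_implicit_params +
                             target_num_implicit_params;
  assert(argument_count == 3);  // receiver + 2 explicit parameters
  return 0;
}
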
4316
4317FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
4318 const Function& function) {
4319 ASSERT(function.IsImplicitGetterOrSetter() ||
4320 function.IsDynamicInvocationForwarder());
4321
4322  // Instead of building a dynamic invocation forwarder that checks the
4323  // argument type and then invokes the original setter, we simply generate
4324  // the type check and an inlined field store. The scope builder takes care
4325  // of setting the correct type check mode in this case.
4326 const auto& target = Function::Handle(
4327 Z, function.IsDynamicInvocationForwarder() ? function.ForwardingTarget()
4328 : function.ptr());
4329 ASSERT(target.IsImplicitGetterOrSetter());
4330
4331 const bool is_method = !function.IsStaticFunction();
4332 const bool is_setter = target.IsImplicitSetterFunction();
4333 const bool is_getter = target.IsImplicitGetterFunction() ||
4334 target.IsImplicitStaticGetterFunction();
4335 ASSERT(is_setter || is_getter);
4336
4337 const auto& field = Field::ZoneHandle(Z, target.accessor_field());
4338
4339 graph_entry_ =
4340 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4341
4342 auto normal_entry = BuildFunctionEntry(graph_entry_);
4343 graph_entry_->set_normal_entry(normal_entry);
4344
4345 Fragment body(normal_entry);
4346 if (is_setter) {
4347 auto const setter_value =
4348 parsed_function_->ParameterVariable(is_method ? 1 : 0);
4349 if (is_method) {
4350 body += LoadLocal(parsed_function_->ParameterVariable(0));
4351 }
4352 body += LoadLocal(setter_value);
4353
4354    // Normally the dyn:* forwarder only has to check the parameters that the
4355    // actual target will not check itself.
4356    // However, here we manually inline the target, so the dyn:* forwarder
4357    // has to check all parameters.
4358 const bool needs_type_check = function.IsDynamicInvocationForwarder() ||
4359 setter_value->needs_type_check();
4360 if (needs_type_check) {
4361 body += CheckAssignable(setter_value->static_type(), setter_value->name(),
4362 AssertAssignableInstr::kParameterCheck,
4363 field.token_pos());
4364 }
4365 if (field.is_late()) {
4366 if (is_method) {
4367 body += Drop();
4368 }
4369 body += Drop();
4370 body += StoreLateField(
4371 field, is_method ? parsed_function_->ParameterVariable(0) : nullptr,
4372 setter_value);
4373 } else {
4374 if (is_method) {
4375 body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
4376 } else {
4377 body += StoreStaticField(TokenPosition::kNoSource, field);
4378 }
4379 }
4380 body += NullConstant();
4381 } else {
4382 ASSERT(is_getter);
4383 if (is_method) {
4384 body += LoadLocal(parsed_function_->ParameterVariable(0));
4385 body += LoadField(
4386 field, /*calls_initializer=*/field.NeedsInitializationCheckOnLoad());
4387 } else if (field.is_const()) {
4388 const auto& value = Object::Handle(Z, field.StaticConstFieldValue());
4389 if (value.IsError()) {
4390 Report::LongJump(Error::Cast(value));
4391 }
4392 body += Constant(Instance::ZoneHandle(Z, Instance::RawCast(value.ptr())));
4393 } else {
4394      // Static fields
4395      //  - with a trivial initializer
4396      //  - without an initializer, if they are not late
4397      // are initialized eagerly and do not have implicit getters.
4398      // Static fields with a non-trivial initializer need a getter to perform
4399      // lazy initialization. Late fields without an initializer need a getter
4400      // to make sure they have already been initialized.
4401 ASSERT(field.has_nontrivial_initializer() ||
4402 (field.is_late() && !field.has_initializer()));
4403 body += LoadStaticField(field, /*calls_initializer=*/true);
4404 }
4405
4406 if (is_method || !field.is_const()) {
4407#if defined(PRODUCT)
4408 RELEASE_ASSERT(!field.needs_load_guard());
4409#else
4410 // Always build fragment for load guard to maintain stable deopt_id
4411 // numbering, but link it into the graph only if field actually
4412 // needs load guard.
4413 Fragment load_guard = CheckAssignable(
4414 AbstractType::Handle(Z, field.type()), Symbols::FunctionResult());
4415 if (field.needs_load_guard()) {
4416 ASSERT(IG->HasAttemptedReload());
4417 body += load_guard;
4418 }
4419#endif
4420 }
4421 }
4422 body += Return(TokenPosition::kNoSource);
4423
4424 PrologueInfo prologue_info(-1, -1);
4425 return new (Z)
4426 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4427 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4428}
4429
4430FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
4431 const Function& function) {
4432 auto& name = String::Handle(Z, function.name());
4433 name = Function::DemangleDynamicInvocationForwarderName(name);
4434 const auto& target = Function::ZoneHandle(Z, function.ForwardingTarget());
4435 ASSERT(!target.IsNull());
4436
4437 if (target.IsImplicitSetterFunction() || target.IsImplicitGetterFunction()) {
4438 return BuildGraphOfFieldAccessor(function);
4439 }
4440 if (target.IsMethodExtractor()) {
4441 return BuildGraphOfMethodExtractor(target);
4442 }
4443 if (FlowGraphBuilder::IsRecognizedMethodForFlowGraph(function)) {
4444 return BuildGraphOfRecognizedMethod(function);
4445 }
4446
4447 graph_entry_ = new (Z) GraphEntryInstr(*parsed_function_, osr_id_);
4448
4449 auto normal_entry = BuildFunctionEntry(graph_entry_);
4450 graph_entry_->set_normal_entry(normal_entry);
4451
4452 PrologueInfo prologue_info(-1, -1);
4453 auto instruction_cursor = BuildPrologue(normal_entry, &prologue_info);
4454
4455 Fragment body;
4456 if (!function.is_native()) {
4457 body += CheckStackOverflowInPrologue(function.token_pos());
4458 }
4459
4460 ASSERT(parsed_function_->scope()->num_context_variables() == 0);
4461
4462  // We should never build a dynamic invocation forwarder for the equality
4463  // operator.
4464 ASSERT(function.name() != Symbols::EqualOperator().ptr());
4465
4466  // Even if the caller did not pass a type argument vector, we still
4467  // call the target with instantiated-to-bounds type arguments.
4468 body += BuildDefaultTypeHandling(function);
4469
4470 // Build argument type checks that complement those that are emitted in the
4471 // target.
4472 BuildTypeArgumentTypeChecks(
4473 TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds, &body);
4474 BuildArgumentTypeChecks(&body, &body, nullptr);
4475
4476 // Push all arguments and invoke the original method.
4477
4478 intptr_t type_args_len = 0;
4479 if (function.IsGeneric()) {
4480 type_args_len = function.NumTypeParameters();
4481 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4482 body += LoadLocal(parsed_function_->function_type_arguments());
4483 }
4484
4485 // Push receiver.
4486 ASSERT(function.NumImplicitParameters() == 1);
4487 body += LoadLocal(parsed_function_->receiver_var());
4488 body += PushExplicitParameters(function, target);
4489
4490 const intptr_t argument_count = function.NumParameters();
4491 const auto& argument_names =
4492 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4493
4494 body += StaticCall(TokenPosition::kNoSource, target, argument_count,
4495 argument_names, ICData::kNoRebind, nullptr, type_args_len);
4496
4497 if (target.has_unboxed_integer_return()) {
4498 body += Box(kUnboxedInt64);
4499 } else if (target.has_unboxed_double_return()) {
4500 body += Box(kUnboxedDouble);
4501 } else if (target.has_unboxed_record_return()) {
4502 // Handled in SelectRepresentations pass in optimized mode.
4503 ASSERT(optimizing_);
4504 }
4505
4506  // Later optimization passes assume that the result of an x.[]=(...) call
4507  // is not used. We must guarantee this invariant, because violating it
4508  // leads to illegal IL once we replace x.[]=(...) with a sequence that does
4509  // not actually produce any value. See http://dartbug.com/29135 for details.
4510 if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
4511 body += Drop();
4512 body += NullConstant();
4513 }
4514
4515 body += Return(TokenPosition::kNoSource);
4516
4517 instruction_cursor->LinkTo(body.entry);
4518
4519  // When compiling for OSR, use a depth-first search to find the OSR
4520  // entry and make the graph entry jump to it instead of the normal entry.
4521 // Catch entries are always considered reachable, even if they
4522 // become unreachable after OSR.
4523 if (IsCompiledForOsr()) {
4524 graph_entry_->RelinkToOsrEntry(Z, last_used_block_id_ + 1);
4525 }
4526 return new (Z)
4527 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4528 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4529}
4530
4531void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
4532 const Fragment& fragment,
4533 int64_t min,
4534 int64_t max) {
4535 ASSERT(fragment.current->IsDefinition());
4536 Range range(RangeBoundary::FromConstant(min),
4537 RangeBoundary::FromConstant(max));
4538 fragment.current->AsDefinition()->set_range(range);
4539}
4540
4541static classid_t TypedDataCidUnboxed(Representation unboxed_representation) {
4542 switch (unboxed_representation) {
4543 case kUnboxedFloat:
4544 // Note kTypedDataFloat32ArrayCid loads kUnboxedDouble.
4545 UNREACHABLE();
4546 return kTypedDataFloat32ArrayCid;
4547 case kUnboxedInt32:
4548 return kTypedDataInt32ArrayCid;
4549 case kUnboxedUint32:
4550 return kTypedDataUint32ArrayCid;
4551 case kUnboxedInt64:
4552 return kTypedDataInt64ArrayCid;
4553 case kUnboxedDouble:
4554 return kTypedDataFloat64ArrayCid;
4555 default:
4556 UNREACHABLE();
4557 }
4558 UNREACHABLE();
4559}
4560
4561Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
4562 Representation unboxed_representation,
4563 intptr_t index_scale,
4564 bool index_unboxed) {
4565 ASSERT(unboxed_representation == kUnboxedInt32 ||
4566 unboxed_representation == kUnboxedUint32 ||
4567 unboxed_representation == kUnboxedInt64 ||
4568 unboxed_representation == kUnboxedFloat ||
4569 unboxed_representation == kUnboxedDouble);
4570 Fragment fragment;
4571 if (unboxed_representation == kUnboxedFloat) {
4572 fragment += BitCast(kUnboxedFloat, kUnboxedInt32);
4573 unboxed_representation = kUnboxedInt32;
4574 }
4575 fragment += StoreIndexedTypedData(TypedDataCidUnboxed(unboxed_representation),
4576 index_scale, index_unboxed);
4577 return fragment;
4578}
4579
4580Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
4581 Representation unboxed_representation,
4582 intptr_t index_scale,
4583 bool index_unboxed) {
4584 ASSERT(unboxed_representation == kUnboxedInt32 ||
4585 unboxed_representation == kUnboxedUint32 ||
4586 unboxed_representation == kUnboxedInt64 ||
4587 unboxed_representation == kUnboxedFloat ||
4588 unboxed_representation == kUnboxedDouble);
4589 Representation representation_for_load = unboxed_representation;
4590 if (unboxed_representation == kUnboxedFloat) {
4591 representation_for_load = kUnboxedInt32;
4592 }
4593 Fragment fragment;
4594 fragment += LoadIndexed(TypedDataCidUnboxed(representation_for_load),
4595 index_scale, index_unboxed);
4596 if (unboxed_representation == kUnboxedFloat) {
4597 fragment += BitCast(kUnboxedInt32, kUnboxedFloat);
4598 }
4599 return fragment;
4600}
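
The kUnboxedFloat special case above loads the 32-bit value as an integer and then reinterprets the bits, rather than converting numerically. In portable C++ the same reinterpretation is expressed with std::memcpy (a sketch, not VM code):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Reinterpret a 32-bit pattern as a float, as BitCast(kUnboxedInt32,
// kUnboxedFloat) does after the integer load.
float BitCastToFloat(uint32_t bits) {
  float value;
  static_assert(sizeof(value) == sizeof(bits), "float must be 32 bits");
  std::memcpy(&value, &bits, sizeof(value));
  return value;
}

int main() {
  std::printf("%f\n", BitCastToFloat(0x3f800000u));  // prints 1.000000
  return 0;
}
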
4601
4602Fragment FlowGraphBuilder::UnhandledException() {
4603 const auto class_table = thread_->isolate_group()->class_table();
4604 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
4605 const auto& klass =
4606 Class::ZoneHandle(H.zone(), class_table->At(kUnhandledExceptionCid));
4607 ASSERT(!klass.IsNull());
4608 Fragment body;
4609 body += AllocateObject(TokenPosition::kNoSource, klass, 0);
4610 LocalVariable* error_instance = MakeTemporary();
4611
4612 body += LoadLocal(error_instance);
4613 body += LoadLocal(CurrentException());
4614 body +=
4615 StoreNativeField(Slot::UnhandledException_exception(),
4616 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
4617
4618 body += LoadLocal(error_instance);
4619 body += LoadLocal(CurrentStackTrace());
4620 body +=
4621 StoreNativeField(Slot::UnhandledException_stacktrace(),
4622 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
4623
4624 return body;
4625}
4626
4627Fragment FlowGraphBuilder::UnboxTruncate(Representation to) {
4628 auto const unbox_to = to == kUnboxedFloat ? kUnboxedDouble : to;
4629 Fragment instructions;
4630 auto* unbox = UnboxInstr::Create(unbox_to, Pop(), DeoptId::kNone,
4631 Instruction::kNotSpeculative);
4632 instructions <<= unbox;
4633 Push(unbox);
4634 if (to == kUnboxedFloat) {
4635 instructions += DoubleToFloat();
4636 }
4637 return instructions;
4638}
4639
4640Fragment FlowGraphBuilder::LoadThread() {
4641 LoadThreadInstr* instr = new (Z) LoadThreadInstr();
4642 Push(instr);
4643 return Fragment(instr);
4644}
4645
4646Fragment FlowGraphBuilder::LoadIsolate() {
4647 Fragment body;
4648 body += LoadThread();
4649 body += LoadNativeField(Slot::Thread_isolate());
4650 return body;
4651}
4652
4653Fragment FlowGraphBuilder::LoadIsolateGroup() {
4654 Fragment body;
4655 body += LoadThread();
4656 body += LoadNativeField(Slot::Thread_isolate_group());
4657 return body;
4658}
4659
4660Fragment FlowGraphBuilder::LoadObjectStore() {
4661 Fragment body;
4662 body += LoadIsolateGroup();
4663 body += LoadNativeField(Slot::IsolateGroup_object_store());
4664 return body;
4665}
4666
4667Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
4668 Fragment body;
4669 body += LoadThread();
4670 body += LoadNativeField(Slot::Thread_service_extension_stream());
4671 return body;
4672}
4673
4674// TODO(http://dartbug.com/47487): Support unboxed output value.
4675Fragment FlowGraphBuilder::BoolToInt() {
4676 // TODO(http://dartbug.com/36855) Build IfThenElseInstr, instead of letting
4677 // the optimizer turn this into that.
4678
4679 LocalVariable* expression_temp = parsed_function_->expression_temp_var();
4680
4681 Fragment instructions;
4682 TargetEntryInstr* is_true;
4683 TargetEntryInstr* is_false;
4684
4685 instructions += BranchIfTrue(&is_true, &is_false);
4686 JoinEntryInstr* join = BuildJoinEntry();
4687
4688 {
4689 Fragment store_1(is_true);
4690 store_1 += IntConstant(1);
4691 store_1 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4692 store_1 += Drop();
4693 store_1 += Goto(join);
4694 }
4695
4696 {
4697 Fragment store_0(is_false);
4698 store_0 += IntConstant(0);
4699 store_0 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4700 store_0 += Drop();
4701 store_0 += Goto(join);
4702 }
4703
4704 instructions = Fragment(instructions.entry, join);
4705 instructions += LoadLocal(expression_temp);
4706 return instructions;
4707}
4708
4709Fragment FlowGraphBuilder::IntToBool() {
4710 Fragment body;
4711 body += IntConstant(0);
4712 body += StrictCompare(Token::kNE_STRICT);
4713 return body;
4714}
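
BoolToInt materializes 1 or 0 through a branch and a temporary, while IntToBool is a single strict comparison against 0. As scalar models (illustrative only):

#include <cassert>

int BoolToIntModel(bool value) { return value ? 1 : 0; }  // branch + temporary
bool IntToBoolModel(int value) { return value != 0; }     // kNE_STRICT vs. 0

int main() {
  assert(BoolToIntModel(true) == 1);
  assert(BoolToIntModel(false) == 0);
  assert(IntToBoolModel(7) && !IntToBoolModel(0));
  return 0;
}
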
4715
4716Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
4717 Token::Kind kind) {
4718 if (CompilerState::Current().is_aot()) {
4719 Value* right = Pop();
4720 Value* left = Pop();
4721 RelationalOpInstr* instr = new (Z) RelationalOpInstr(
4722 InstructionSource(position), kind, left, right, kMintCid,
4723 GetNextDeoptId(), Instruction::SpeculativeMode::kNotSpeculative);
4724 Push(instr);
4725 return Fragment(instr);
4726 }
4727 const String* name = nullptr;
4728 switch (kind) {
4729 case Token::kLT:
4730 name = &Symbols::LAngleBracket();
4731 break;
4732 case Token::kGT:
4733 name = &Symbols::RAngleBracket();
4734 break;
4735 case Token::kLTE:
4736 name = &Symbols::LessEqualOperator();
4737 break;
4738 case Token::kGTE:
4739 name = &Symbols::GreaterEqualOperator();
4740 break;
4741 default:
4742 UNREACHABLE();
4743 }
4744 return InstanceCall(
4745 position, *name, kind, /*type_args_len=*/0, /*argument_count=*/2,
4746 /*argument_names=*/Array::null_array(), /*checked_argument_count=*/2);
4747}
4748
4749Fragment FlowGraphBuilder::NativeReturn(
4750 const compiler::ffi::CallbackMarshaller& marshaller) {
4751 const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
4752 if (num_return_defs == 1) {
4753 auto* instr = new (Z) NativeReturnInstr(Pop(), marshaller);
4754 return Fragment(instr).closed();
4755 }
4756 ASSERT_EQUAL(num_return_defs, 2);
4757 auto* offset = Pop();
4758 auto* typed_data_base = Pop();
4759 auto* instr = new (Z) NativeReturnInstr(typed_data_base, offset, marshaller);
4760 return Fragment(instr).closed();
4761}
4762
4763Fragment FlowGraphBuilder::BitCast(Representation from, Representation to) {
4764 BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
4765 Push(instr);
4766 return Fragment(instr);
4767}
4768
4769Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
4770 Call1ArgStubInstr::StubId stub_id) {
4771 Call1ArgStubInstr* instr = new (Z) Call1ArgStubInstr(
4772 InstructionSource(position), stub_id, Pop(), GetNextDeoptId());
4773 Push(instr);
4774 return Fragment(instr);
4775}
4776
4777Fragment FlowGraphBuilder::Suspend(TokenPosition position,
4778 SuspendInstr::StubId stub_id) {
4779 Value* type_args =
4780 (stub_id == SuspendInstr::StubId::kAwaitWithTypeCheck) ? Pop() : nullptr;
4781 Value* operand = Pop();
4782 SuspendInstr* instr =
4783 new (Z) SuspendInstr(InstructionSource(position), stub_id, operand,
4784 type_args, GetNextDeoptId(), GetNextDeoptId());
4785 Push(instr);
4786 return Fragment(instr);
4787}
4788
4789Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
4790 const AbstractType& compound_type) {
4791 const auto& compound_sub_class =
4792 Class::ZoneHandle(Z, compound_type.type_class());
4793 compound_sub_class.EnsureIsFinalized(thread_);
4794
4795 auto& state = thread_->compiler_state();
4796
4797 Fragment body;
4798 LocalVariable* typed_data = MakeTemporary("typed_data_base");
4799 body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
4800 LocalVariable* compound = MakeTemporary("compound");
4801 body += LoadLocal(compound);
4802 body += LoadLocal(typed_data);
4803 body += StoreField(state.CompoundTypedDataBaseField(),
4804 StoreFieldInstr::Kind::kInitializing);
4805 body += LoadLocal(compound);
4806 body += IntConstant(0);
4807 body += StoreField(state.CompoundOffsetInBytesField(),
4808 StoreFieldInstr::Kind::kInitializing);
4809 body += DropTempsPreserveTop(1); // Drop TypedData.
4810 return body;
4811}
4812
4813Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {
4814 Fragment body;
4815 auto& state = thread_->compiler_state();
4816 body += LoadField(state.CompoundTypedDataBaseField(),
4817 /*calls_initializer=*/false);
4818 return body;
4819}
4820
4821Fragment FlowGraphBuilder::LoadOffsetInBytesFromCompound() {
4822 Fragment body;
4823 auto& state = thread_->compiler_state();
4824 body += LoadField(state.CompoundOffsetInBytesField(),
4825 /*calls_initializer=*/false);
4826 return body;
4827}
4828
4829Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
4830 ZoneGrowableArray<LocalVariable*>* definitions,
4831 const GrowableArray<Representation>& representations) {
4832 Fragment body;
4833 const intptr_t num_defs = representations.length();
4834 ASSERT(definitions->length() == num_defs);
4835
4836 LocalVariable* uint8_list = MakeTemporary("uint8_list");
4837 int offset_in_bytes = 0;
4838 for (intptr_t i = 0; i < num_defs; i++) {
4839 const Representation representation = representations[i];
4840 body += LoadLocal(uint8_list);
4841 body += IntConstant(offset_in_bytes);
4842 body += LoadLocal(definitions->At(i));
4843 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4844 /*index_unboxed=*/false);
4845 offset_in_bytes += RepresentationUtils::ValueSize(representation);
4846 }
4847  body += DropTempsPreserveTop(num_defs);  // Drop chunk defs, keep TypedData.
4848 return body;
4849}
4850
4851static intptr_t chunk_size(intptr_t bytes_left) {
4852 ASSERT(bytes_left >= 1);
4853 if (bytes_left >= 8 && compiler::target::kWordSize == 8) {
4854 return 8;
4855 }
4856 if (bytes_left >= 4) {
4857 return 4;
4858 }
4859 if (bytes_left >= 2) {
4860 return 2;
4861 }
4862 return 1;
4863}
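
chunk_size() greedily picks the largest chunk that still fits, so, for example, a 15-byte region on a 64-bit target decomposes into 8 + 4 + 2 + 1 bytes. A standalone copy of the logic with the word size made an explicit parameter for the demo:

#include <cstdio>

static int ChunkSize(int bytes_left, int word_size) {
  if (bytes_left >= 8 && word_size == 8) return 8;
  if (bytes_left >= 4) return 4;
  if (bytes_left >= 2) return 2;
  return 1;
}

int main() {
  int remaining = 15;
  while (remaining > 0) {
    const int chunk = ChunkSize(remaining, /*word_size=*/8);
    std::printf("copy %d byte(s)\n", chunk);  // prints 8, 4, 2, 1
    remaining -= chunk;
  }
  return 0;
}
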
4864
4865static classid_t typed_data_cid(intptr_t chunk_size) {
4866 switch (chunk_size) {
4867 case 8:
4868 return kTypedDataInt64ArrayCid;
4869 case 4:
4870 return kTypedDataInt32ArrayCid;
4871 case 2:
4872 return kTypedDataInt16ArrayCid;
4873 case 1:
4874 return kTypedDataInt8ArrayCid;
4875 }
4876 UNREACHABLE();
4877}
4878
4879// Only for use within FfiCallbackConvertCompoundArgumentToDart and
4880// FfiCallbackConvertCompoundReturnToNative, where we know the "array" being
4881// passed is an untagged pointer coming from C.
4882static classid_t external_typed_data_cid(intptr_t chunk_size) {
4883 switch (chunk_size) {
4884 case 8:
4885 return kExternalTypedDataInt64ArrayCid;
4886 case 4:
4887 return kExternalTypedDataInt32ArrayCid;
4888 case 2:
4889 return kExternalTypedDataInt16ArrayCid;
4890 case 1:
4891 return kExternalTypedDataInt8ArrayCid;
4892 }
4893 UNREACHABLE();
4894}
4895
4896Fragment FlowGraphBuilder::LoadTail(LocalVariable* variable,
4897 intptr_t size,
4898 intptr_t offset_in_bytes,
4899 Representation representation) {
4900 Fragment body;
4901 if (size == 8 || size == 4) {
4902 body += LoadLocal(variable);
4903 body += LoadTypedDataBaseFromCompound();
4904 body += LoadLocal(variable);
4905 body += LoadOffsetInBytesFromCompound();
4906 body += IntConstant(offset_in_bytes);
4907 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4908 body += LoadIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4909 /*index_unboxed=*/false);
4910 return body;
4911 }
4912 ASSERT(representation != kUnboxedFloat);
4913 ASSERT(representation != kUnboxedDouble);
4914 intptr_t shift = 0;
4915 intptr_t remaining = size;
4916 auto step = [&](intptr_t part_bytes, intptr_t part_cid) {
4917 while (remaining >= part_bytes) {
4918 body += LoadLocal(variable);
4919 body += LoadTypedDataBaseFromCompound();
4920 body += LoadLocal(variable);
4921 body += LoadOffsetInBytesFromCompound();
4922 body += IntConstant(offset_in_bytes);
4923 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4924      body += LoadIndexed(part_cid, /*index_scale=*/1,
4925 /*index_unboxed=*/false);
4926 if (shift != 0) {
4927 body += IntConstant(shift);
4928        // 64-bit targets don't support kUnboxedInt32 ops; use kUnboxedIntPtr.
4929        Representation op_representation = kUnboxedIntPtr;
4930        body += BinaryIntegerOp(Token::kSHL, op_representation,
4931                                /*is_truncating=*/true);
4932        body += BinaryIntegerOp(Token::kBIT_OR, op_representation,
4933                                /*is_truncating=*/true);
4934 }
4935 offset_in_bytes += part_bytes;
4936 remaining -= part_bytes;
4937 shift += part_bytes * kBitsPerByte;
4938 }
4939 };
4940 step(8, kTypedDataUint64ArrayCid);
4941 step(4, kTypedDataUint32ArrayCid);
4942 step(2, kTypedDataUint16ArrayCid);
4943 step(1, kTypedDataUint8ArrayCid);
4944
4945  // Sigh, LoadIndexed's representation for int8/16 is [u]int64, but the
4946  // FfiCall wants an [u]int32 input. Manually insert a "truncating"
4947  // conversion so one isn't automatically added that thinks it can deopt.
4948 Representation from_representation = Peek(0)->representation();
4949 if (from_representation != representation) {
4950 IntConverterInstr* convert = new IntConverterInstr(
4951 from_representation, representation, Pop(), DeoptId::kNone);
4952 convert->mark_truncating();
4953 Push(convert);
4954 body <<= convert;
4955 }
4956
4957 return body;
4958}
4959
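// Worked example for the shift/or path in LoadTail above, assuming a
// little-endian target: a 3-byte tail is assembled from one 2-byte and one
// 1-byte load (`load16`/`load8` are hypothetical shorthands for the
// LoadIndexed instructions emitted by `step`):
//
//   value  = load16(base + offset);           // step(2, ...), shift == 0
//   value |= load8(base + offset + 2) << 16;  // step(1, ...), shift == 16
//
// so bytes [b0 b1 b2] are reconstructed as b0 | (b1 << 8) | (b2 << 16).
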
4960Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
4961 LocalVariable* variable,
4962 const compiler::ffi::BaseMarshaller& marshaller,
4963 intptr_t arg_index) {
4964 Fragment body;
4965 const auto& native_loc = marshaller.Location(arg_index);
4966 if (native_loc.IsMultiple()) {
4967 const auto& multiple_loc = native_loc.AsMultiple();
4968 intptr_t offset_in_bytes = 0;
4969 for (intptr_t i = 0; i < multiple_loc.locations().length(); i++) {
4970 const auto& loc = *multiple_loc.locations()[i];
4971 Representation representation;
4972 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
4973 // IL can only pass integers to integer Locations, so pass as integer if
4974 // the Location requires it to be an integer.
4975 representation = loc.container_type().AsRepresentationOverApprox(Z);
4976 } else {
4977 // Representations do not support 8 or 16 bit ints, so over-approximate
4978 // to 32 bits.
4979 representation = loc.payload_type().AsRepresentationOverApprox(Z);
4980 }
4981 intptr_t size = loc.payload_type().SizeInBytes();
4982 body += LoadTail(variable, size, offset_in_bytes, representation);
4983 offset_in_bytes += size;
4984 }
4985 } else if (native_loc.IsStack()) {
4986 // Break the struct into pieces as separate IL definitions, and pass
4987 // those definitions into the FFI call.
4988 Representation representation = kUnboxedWord;
4989 intptr_t remaining = native_loc.payload_type().SizeInBytes();
4990 intptr_t offset_in_bytes = 0;
4991 while (remaining >= compiler::target::kWordSize) {
4992 body += LoadTail(variable, compiler::target::kWordSize, offset_in_bytes,
4993 representation);
4994 offset_in_bytes += compiler::target::kWordSize;
4995 remaining -= compiler::target::kWordSize;
4996 }
4997 if (remaining > 0) {
4998 body += LoadTail(variable, remaining, offset_in_bytes, representation);
4999 }
5000 } else {
5001 ASSERT(native_loc.IsPointerToMemory());
5002 // Only load the typed data, do copying in the FFI call machine code.
5003 body += LoadLocal(variable); // User-defined struct.
5004 body += LoadTypedDataBaseFromCompound();
5005 body += LoadLocal(variable); // User-defined struct.
5006 body += LoadOffsetInBytesFromCompound();
5007 body += UnboxTruncate(kUnboxedWord);
5008 }
5009 return body;
5010}
5011
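// Illustration of the IsMultiple() case above (ABI-specific; this example
// assumes the System V x86-64 calling convention): a struct such as
//
//   struct S { double d; int64_t i; };
//
// is classified into two locations, a floating-point register for `d` and an
// integer register for `i`, so the loop emits one 8-byte LoadTail per
// location and the struct is passed as two separate IL definitions.
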
5012Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
5013 const compiler::ffi::BaseMarshaller& marshaller,
5014 intptr_t arg_index) {
5015 Fragment body;
5016 // The typed data is allocated before the FFI call, and is populated in
5017 // machine code. So, here, it only has to be wrapped in the struct class.
5018 const auto& compound_type =
5019 AbstractType::Handle(Z, marshaller.CType(arg_index));
5020 body += WrapTypedDataBaseInCompound(compound_type);
5021 return body;
5022}
5023
5024Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
5025 const compiler::ffi::BaseMarshaller& marshaller,
5026 intptr_t arg_index,
5027 ZoneGrowableArray<LocalVariable*>* definitions) {
5028 const intptr_t length_in_bytes =
5029 marshaller.Location(arg_index).payload_type().SizeInBytes();
5030
5031 Fragment body;
5032 if (marshaller.Location(arg_index).IsMultiple()) {
5033 body += IntConstant(length_in_bytes);
5034 body +=
5035 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5036 LocalVariable* uint8_list = MakeTemporary("uint8_list");
5037
5038 const auto& multiple_loc = marshaller.Location(arg_index).AsMultiple();
5039 const intptr_t num_defs = multiple_loc.locations().length();
5040 intptr_t offset_in_bytes = 0;
5041 for (intptr_t i = 0; i < num_defs; i++) {
5042 const auto& loc = *multiple_loc.locations()[i];
5043 Representation representation;
5044 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
5045 // IL can only pass integers to integer Locations, so pass as integer if
5046 // the Location requires it to be an integer.
5047 representation = loc.container_type().AsRepresentationOverApprox(Z);
5048 } else {
5049 // Representations do not support 8 or 16 bit ints, so over-approximate
5050 // to 32 bits.
5051 representation = loc.payload_type().AsRepresentationOverApprox(Z);
5052 }
5053 body += LoadLocal(uint8_list);
5054 body += IntConstant(offset_in_bytes);
5055 body += LoadLocal(definitions->At(i));
5056 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
5057 /*index_unboxed=*/false);
5058 offset_in_bytes += loc.payload_type().SizeInBytes();
5059 }
5060
5061 body += DropTempsPreserveTop(num_defs); // Drop chunk defs, keep TypedData.
5062 } else if (marshaller.Location(arg_index).IsStack()) {
5063 // Allocate and populate a TypedData from the individual NativeParameters.
5064 body += IntConstant(length_in_bytes);
5065 body +=
5066 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5067 GrowableArray<Representation> representations;
5068 marshaller.RepsInFfiCall(arg_index, &representations);
5069 body += PopFromStackToTypedDataBase(definitions, representations);
5070 } else {
5071 ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
5072 // Allocate a TypedData and copy contents pointed to by an address into it.
5073 LocalVariable* address_of_compound = MakeTemporary("address_of_compound");
5074 body += IntConstant(length_in_bytes);
5075 body +=
5076 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5077 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5078 intptr_t offset_in_bytes = 0;
5079 while (offset_in_bytes < length_in_bytes) {
5080 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5081 const intptr_t chunk_sizee = chunk_size(bytes_left);
5082
5083 body += LoadLocal(address_of_compound);
5084 body += IntConstant(offset_in_bytes);
5085 body +=
5086 LoadIndexed(external_typed_data_cid(chunk_sizee), /*index_scale=*/1,
5087 /*index_unboxed=*/false);
5088 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5089
5090 body += LoadLocal(typed_data_base);
5091 body += IntConstant(offset_in_bytes);
5092 body += LoadLocal(chunk_value);
5093 body += StoreIndexedTypedData(typed_data_cid(chunk_sizee),
5094 /*index_scale=*/1,
5095 /*index_unboxed=*/false);
5096 body += DropTemporary(&chunk_value);
5097
5098 offset_in_bytes += chunk_sizee;
5099 }
5100 ASSERT(offset_in_bytes == length_in_bytes);
5101 body += DropTempsPreserveTop(1); // Drop address_of_compound.
5102 }
5103 // Wrap typed data in compound class.
5104 const auto& compound_type =
5105 AbstractType::Handle(Z, marshaller.CType(arg_index));
5106 body += WrapTypedDataBaseInCompound(compound_type);
5107 return body;
5108}
5109
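// Summary of the three branches above: Multiple stores each register
// definition into a fresh Uint8List; Stack pops the NativeParameter
// definitions directly into the typed data; PointerToMemory chunk-copies from
// the C pointer (external-cid loads, internal-cid stores). All three finish
// by wrapping the typed data in the compound class.
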
5110Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
5111 const compiler::ffi::CallbackMarshaller& marshaller,
5112 intptr_t arg_index) {
5113 Fragment body;
5114 const auto& native_loc = marshaller.Location(arg_index);
5115 if (native_loc.IsMultiple()) {
5116 // Pass the typed data and offset to the native return instruction, and
5117 // do the copying in machine code.
5118 LocalVariable* compound = MakeTemporary("compound");
5119 body += LoadLocal(compound);
5120 body += LoadOffsetInBytesFromCompound();
5121 body += UnboxTruncate(kUnboxedWord);
5122 body += StoreLocal(TokenPosition::kNoSource,
5123 parsed_function_->expression_temp_var());
5124 body += Drop();
5125 body += LoadTypedDataBaseFromCompound();
5126 body += LoadLocal(parsed_function_->expression_temp_var());
5127 } else {
5128 ASSERT(native_loc.IsPointerToMemory());
5129 // We copy the data into the right location in IL.
5130 const intptr_t length_in_bytes =
5131 marshaller.Location(arg_index).payload_type().SizeInBytes();
5132
5133 LocalVariable* compound = MakeTemporary("compound");
5134 body += LoadLocal(compound);
5135 body += LoadTypedDataBaseFromCompound();
5136 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5137 body += LoadLocal(compound);
5138 body += LoadOffsetInBytesFromCompound();
5139 LocalVariable* offset = MakeTemporary("offset");
5140
5141 auto* pointer_to_return =
5142 new (Z) NativeParameterInstr(marshaller, compiler::ffi::kResultIndex);
5143 Push(pointer_to_return); // Address where return value should be stored.
5144 body <<= pointer_to_return;
5145 LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
5146
5147 intptr_t offset_in_bytes = 0;
5148 while (offset_in_bytes < length_in_bytes) {
5149 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5150 const intptr_t chunk_sizee = chunk_size(bytes_left);
5151
5152 body += LoadLocal(typed_data_base);
5153 body += LoadLocal(offset);
5154 body += IntConstant(offset_in_bytes);
5155 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
5156 body += LoadIndexed(typed_data_cid(chunk_sizee), /*index_scale=*/1,
5157 /*index_unboxed=*/false);
5158 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5159
5160 body += LoadLocal(unboxed_address);
5161 body += IntConstant(offset_in_bytes);
5162 body += LoadLocal(chunk_value);
5163 body += StoreIndexedTypedData(external_typed_data_cid(chunk_sizee),
5164 /*index_scale=*/1,
5165 /*index_unboxed=*/false);
5166 body += DropTemporary(&chunk_value);
5167
5168 offset_in_bytes += chunk_sizee;
5169 }
5170
5171 ASSERT(offset_in_bytes == length_in_bytes);
5172 body += DropTempsPreserveTop(3);
5173 }
5174 return body;
5175}
5176
5177Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
5178 const compiler::ffi::BaseMarshaller& marshaller,
5179 intptr_t arg_index) {
5180 ASSERT(!marshaller.IsCompoundCType(arg_index));
5181
5182 Fragment body;
5183 if (marshaller.IsPointerPointer(arg_index)) {
5184 Class& result_class =
5185 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
5186 // This class might only be instantiated as a return type of ffi calls.
5187 result_class.EnsureIsFinalized(thread_);
5188
5189 TypeArguments& args =
5190 TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());
5191
5192 // A kernel transform for FFI in the front-end ensures that type parameters
5193 // do not appear in the type arguments to any Pointer classes in an FFI
5194 // signature.
5195 ASSERT(args.IsNull() || args.IsInstantiated());
5196 args = args.Canonicalize(thread_);
5197
5198 LocalVariable* address = MakeTemporary("address");
5199 LocalVariable* result = parsed_function_->expression_temp_var();
5200
5201 body += Constant(args);
5202 body += AllocateObject(TokenPosition::kNoSource, result_class, 1);
5203 body += StoreLocal(TokenPosition::kNoSource, result);
5204 body += LoadLocal(address);
5205 body += StoreNativeField(Slot::PointerBase_data(),
5206 InnerPointerAccess::kCannotBeInnerPointer,
5207 StoreFieldInstr::Kind::kInitializing);
5208 body += DropTemporary(&address); // address
5209 body += LoadLocal(result);
5210 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5211 UNREACHABLE(); // Only supported for FFI call arguments.
5212 } else if (marshaller.IsCompoundPointer(arg_index)) {
5213 UNREACHABLE(); // Only supported for FFI call arguments.
5214 } else if (marshaller.IsHandleCType(arg_index)) {
5215 // The top of the stack is a Dart_Handle, so retrieve the tagged pointer
5216 // out of it.
5217 body += LoadNativeField(Slot::LocalHandle_ptr());
5218 } else if (marshaller.IsVoid(arg_index)) {
5219 // Ignore whatever value was being returned and return null.
5220 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5221 body += Drop();
5222 body += NullConstant();
5223 } else {
5224 if (marshaller.RequiresBitCast(arg_index)) {
5225 body += BitCast(
5226 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)),
5227 marshaller.RepInDart(arg_index));
5228 }
5229
5230 body += Box(marshaller.RepInDart(arg_index));
5231
5232 if (marshaller.IsBool(arg_index)) {
5233 body += IntToBool();
5234 }
5235 }
5236 return body;
5237}
5238
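// Example of the fallthrough branch above for a C `bool` result: the value
// arrives in an integer representation, Box() produces a Dart integer, and
// IntToBool() then converts it into a Dart `true`/`false`. For a plain
// `int32_t` result only the Box() step applies.
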
5239Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
5240 const compiler::ffi::BaseMarshaller& marshaller,
5241 intptr_t arg_index,
5242 LocalVariable* variable) {
5243 ASSERT(!marshaller.IsCompoundCType(arg_index));
5244
5245 Fragment body;
5246 if (marshaller.IsPointerPointer(arg_index)) {
5247 // This can only be Pointer, so it is safe to load the data field.
5248 body += LoadNativeField(Slot::PointerBase_data(),
5249 InnerPointerAccess::kCannotBeInnerPointer);
5250 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5251 // Nothing to do. Unwrap in `FfiCallInstr::EmitNativeCode`.
5252 } else if (marshaller.IsCompoundPointer(arg_index)) {
5253 ASSERT(variable != nullptr);
5254 body += LoadTypedDataBaseFromCompound();
5255 body += LoadLocal(variable); // User-defined struct.
5256 body += LoadOffsetInBytesFromCompound();
5257 body += UnboxTruncate(kUnboxedWord);
5258 } else if (marshaller.IsHandleCType(arg_index)) {
5259 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5260 // pointer to the stack slot as the native handle argument. Therefore the
5261 // only handles that need wrapping are function results.
5262 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5263 LocalVariable* object = MakeTemporary("object");
5264
5265 auto* const arg_reps =
5266 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5267
5268 // Get a reference to the top handle scope.
5269 body += LoadThread();
5270 body += LoadNativeField(Slot::Thread_api_top_scope());
5271 arg_reps->Add(kUntagged);
5272
5273 // Allocate a new handle in the top handle scope.
5274 body +=
5275 CallLeafRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
5276
5277 LocalVariable* handle = MakeTemporary("handle");
5278
5279 // Store the object address into the handle.
5280 body += LoadLocal(handle);
5281 body += LoadLocal(object);
5282 body += StoreNativeField(Slot::LocalHandle_ptr(),
5283 StoreFieldInstr::Kind::kInitializing);
5284
5285 body += DropTempsPreserveTop(1); // Drop object.
5286 } else if (marshaller.IsVoid(arg_index)) {
5287 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5288 // Ignore whatever value was being returned and return nullptr.
5289 body += Drop();
5290 body += UnboxedIntConstant(0, kUnboxedIntPtr);
5291 } else {
5292 if (marshaller.IsBool(arg_index)) {
5293 body += BoolToInt();
5294 }
5295
5296 body += UnboxTruncate(marshaller.RepInDart(arg_index));
5297 }
5298
5299 if (marshaller.RequiresBitCast(arg_index)) {
5300 body += BitCast(
5301 marshaller.RepInDart(arg_index),
5302 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)));
5303 }
5304
5305 return body;
5306}
5307
5308FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
5309 const Function& function) {
5310 switch (function.GetFfiCallbackKind()) {
5311 case FfiCallbackKind::kIsolateLocalStaticCallback:
5312 case FfiCallbackKind::kIsolateLocalClosureCallback:
5313 return BuildGraphOfSyncFfiCallback(function);
5314 case FfiCallbackKind::kAsyncCallback:
5315 return BuildGraphOfAsyncFfiCallback(function);
5316 }
5317 UNREACHABLE();
5318 return nullptr;
5319}
5320
5321Fragment FlowGraphBuilder::FfiNativeLookupAddress(
5322 const dart::Instance& native) {
5323 const auto& native_class = Class::Handle(Z, native.clazz());
5324 ASSERT(String::Handle(Z, native_class.UserVisibleName())
5325 .Equals(Symbols::FfiNative()));
5326 const auto& native_class_fields = Array::Handle(Z, native_class.fields());
5327 ASSERT(native_class_fields.Length() == 4);
5328 const auto& symbol_field =
5329 Field::Handle(Z, Field::RawCast(native_class_fields.At(1)));
5330 ASSERT(!symbol_field.is_static());
5331 const auto& asset_id_field =
5332 Field::Handle(Z, Field::RawCast(native_class_fields.At(2)));
5333 ASSERT(!asset_id_field.is_static());
5334 const auto& symbol =
5335 String::ZoneHandle(Z, String::RawCast(native.GetField(symbol_field)));
5336 const auto& asset_id =
5337 String::ZoneHandle(Z, String::RawCast(native.GetField(asset_id_field)));
5338 const auto& type_args = TypeArguments::Handle(Z, native.GetTypeArguments());
5339 ASSERT(type_args.Length() == 1);
5340 const auto& native_type = AbstractType::ZoneHandle(Z, type_args.TypeAt(0));
5341 intptr_t arg_n;
5342 if (native_type.IsFunctionType()) {
5343 const auto& native_function_type = FunctionType::Cast(native_type);
5344 arg_n = native_function_type.NumParameters() -
5345 native_function_type.num_implicit_parameters();
5346 } else {
5347 // We're looking up the address of a native field.
5348 arg_n = 0;
5349 }
5350 const auto& ffi_resolver =
5351 Function::ZoneHandle(Z, IG->object_store()->ffi_resolver_function());
5352#if !defined(TARGET_ARCH_IA32)
5353 // We have access to the object pool, so use a cacheable static call.
5354 Fragment body;
5355 body += Constant(asset_id);
5356 body += Constant(symbol);
5357 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5358 body +=
5359 CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
5360 /*argument_count=*/3,
5361 /*argument_names=*/Array::null_array(),
5362 /*type_args_count=*/0);
5363 return body;
5364#else // !defined(TARGET_ARCH_IA32)
5365 // IA32 only has JIT and no object pool. This function will only be
5366 // compiled when it is about to be run, so do the lookup here.
5367 char* error = nullptr;
5368#if !defined(DART_PRECOMPILER) || defined(TESTING)
5369 const uintptr_t function_address =
5370 FfiResolveInternal(asset_id, symbol, arg_n, &error);
5371#else
5372 const uintptr_t function_address = 0;
5373 UNREACHABLE(); // JIT runtime should not contain AOT code
5374#endif
5375 if (error == nullptr) {
5376 Fragment body;
5377 body += UnboxedIntConstant(function_address, kUnboxedAddress);
5378 body += ConvertUnboxedToUntagged();
5379 return body;
5380 } else {
5381 free(error);
5382 // The lookup failed. We want to throw an error consistent with AOT, so
5383 // compile in a runtime lookup so that the error is thrown from the same
5384 // error path.
5385 Fragment body;
5386 body += Constant(asset_id);
5387 body += Constant(symbol);
5388 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5389 // Non-cacheable call, this is IA32.
5390 body += StaticCall(TokenPosition::kNoSource, ffi_resolver,
5391 /*argument_count=*/3, ICData::kStatic);
5392 body += UnboxTruncate(kUnboxedAddress);
5393 body += ConvertUnboxedToUntagged();
5394 return body;
5395 }
5396#endif // !defined(TARGET_ARCH_IA32)
5397}
5398
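// On non-IA32 targets the fragment above amounts to a single cacheable call,
// roughly `ffi_resolver(assetId, symbol, argCount)`, whose untagged result is
// the resolved native address. The IA32 path instead resolves eagerly at
// compile time via FfiResolveInternal, falling back to a non-cacheable
// StaticCall on failure so the error is thrown from the shared error path.
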
5399Fragment FlowGraphBuilder::FfiNativeFunctionBody(const Function& function) {
5400 ASSERT(function.is_ffi_native());
5401 ASSERT(!IsRecognizedMethodForFlowGraph(function));
5402 ASSERT(optimizing_);
5403
5404 const auto& c_signature =
5405 FunctionType::ZoneHandle(Z, function.FfiCSignature());
5406 auto const& native_instance =
5407 Instance::Handle(function.GetNativeAnnotation());
5408
5409 Fragment body;
5410 body += FfiNativeLookupAddress(native_instance);
5411 body += FfiCallFunctionBody(function, c_signature,
5412 /*first_argument_parameter_offset=*/0);
5413 return body;
5414}
5415
5416Fragment FlowGraphBuilder::FfiCallFunctionBody(
5417 const Function& function,
5418 const FunctionType& c_signature,
5419 intptr_t first_argument_parameter_offset) {
5420 ASSERT(function.is_ffi_native() || function.IsFfiCallClosure());
5421
5422 LocalVariable* address = MakeTemporary("address");
5423
5424 Fragment body;
5425
5426 const char* error = nullptr;
5427 const auto marshaller_ptr = compiler::ffi::CallMarshaller::FromFunction(
5428 Z, function, first_argument_parameter_offset, c_signature, &error);
5429 // AbiSpecific integers can be incomplete, leaving the calling convention
5430 // unknown. However, this is caught in asFunction in both JIT and AOT.
5431 RELEASE_ASSERT(error == nullptr);
5432 RELEASE_ASSERT(marshaller_ptr != nullptr);
5433 const auto& marshaller = *marshaller_ptr;
5434
5435 const bool signature_contains_handles = marshaller.ContainsHandles();
5436
5437 // FFI trampolines are accessed via closures, so non-covariant argument types
5438 // and type arguments are either statically checked by the type system or
5439 // dynamically checked via dynamic closure call dispatchers.
5440
5441 // Null check arguments before we go into the try catch, so that we don't
5442 // catch our own null errors.
5443 const intptr_t num_args = marshaller.num_args();
5444 for (intptr_t i = 0; i < num_args; i++) {
5445 if (marshaller.IsHandleCType(i)) {
5446 continue;
5447 }
5448 body += LoadLocal(parsed_function_->ParameterVariable(
5449 first_argument_parameter_offset + i));
5450 // TODO(http://dartbug.com/47486): Support entry without checking for null.
5451 // Check for 'null'.
5452 body += CheckNullOptimized(
5453 String::ZoneHandle(
5454 Z, function.ParameterNameAt(first_argument_parameter_offset + i)),
5455 CheckNullInstr::kArgumentError);
5456 body += StoreLocal(TokenPosition::kNoSource,
5457 parsed_function_->ParameterVariable(
5458 first_argument_parameter_offset + i));
5459 body += Drop();
5460 }
5461
5462 intptr_t try_handler_index = -1;
5463 if (signature_contains_handles) {
5464 // Wrap in Try catch to transition from Native to Generated on a throw from
5465 // the dart_api.
5466 try_handler_index = AllocateTryIndex();
5467 body += TryCatch(try_handler_index);
5468 ++try_depth_;
5469 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5470 // need it.
5471 // We no longer need the scope for passing in Handle arguments, but the
5472 // native function might, for instance, rely on this scope for Dart API calls.
5473
5474 auto* const arg_reps =
5475 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5476
5477 body += LoadThread(); // argument.
5478 arg_reps->Add(kUntagged);
5479
5480 body += CallLeafRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged,
5481 *arg_reps);
5482 }
5483
5484 // Allocate typed data before FfiCall and pass it in to ffi call if needed.
5485 LocalVariable* return_compound_typed_data = nullptr;
5486 if (marshaller.ReturnsCompound()) {
5487 body += IntConstant(marshaller.CompoundReturnSizeInBytes());
5488 body +=
5489 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5490 return_compound_typed_data = MakeTemporary();
5491 }
5492
5493 // Unbox and push the arguments.
5494 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5495 if (marshaller.IsCompoundCType(i)) {
5496 body += FfiCallConvertCompoundArgumentToNative(
5497 parsed_function_->ParameterVariable(first_argument_parameter_offset +
5498 i),
5499 marshaller, i);
5500 } else {
5501 body += LoadLocal(parsed_function_->ParameterVariable(
5502 first_argument_parameter_offset + i));
5503 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5504 // pointer to the stack slot as the native handle argument.
5505 // Therefore we do not need to wrap handles.
5506 if (!marshaller.IsHandleCType(i)) {
5507 body += FfiConvertPrimitiveToNative(
5508 marshaller, i,
5509 parsed_function_->ParameterVariable(
5510 first_argument_parameter_offset + i));
5511 }
5512 }
5513 }
5514
5515 body += LoadLocal(address);
5516
5517 if (marshaller.ReturnsCompound()) {
5518 body += LoadLocal(return_compound_typed_data);
5519 }
5520
5521 body += FfiCall(marshaller, function.FfiIsLeaf());
5522
5523 const intptr_t num_defs = marshaller.NumReturnDefinitions();
5524 ASSERT(num_defs >= 1);
5525 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5526 LocalVariable* def = MakeTemporary("ffi call result");
5527 defs->Add(def);
5528
5529 if (marshaller.ReturnsCompound()) {
5530 // Drop call result, typed data with contents is already on the stack.
5531 body += DropTemporary(&def);
5532 }
5533
5534 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5535 body += FfiCallConvertCompoundReturnToDart(marshaller,
5536 compiler::ffi::kResultIndex);
5537 } else {
5538 body += FfiConvertPrimitiveToDart(marshaller, compiler::ffi::kResultIndex);
5539 }
5540
5541 auto exit_handle_scope = [&]() -> Fragment {
5542 Fragment code;
5543 auto* const arg_reps =
5544 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5545
5546 code += LoadThread(); // argument.
5547 arg_reps->Add(kUntagged);
5548
5549 code += CallLeafRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged,
5550 *arg_reps);
5551 code += Drop();
5552 return code;
5553 };
5554
5555 if (signature_contains_handles) {
5556 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5557 // need it.
5558 body += DropTempsPreserveTop(1); // Drop api_local_scope.
5559 body += exit_handle_scope();
5560 }
5561
5562 body += DropTempsPreserveTop(1); // Drop address.
5563 body += Return(TokenPosition::kNoSource);
5564
5565 if (signature_contains_handles) {
5566 --try_depth_;
5567 ++catch_depth_;
5568 Fragment catch_body =
5569 CatchBlockEntry(Array::empty_array(), try_handler_index,
5570 /*needs_stacktrace=*/true, /*is_synthesized=*/true);
5571
5572 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5573 // need it.
5574 // TODO(41984): If we want to pass in the handle scope, move it out
5575 // of the try catch.
5576 catch_body += exit_handle_scope();
5577
5578 catch_body += LoadLocal(CurrentException());
5579 catch_body += LoadLocal(CurrentStackTrace());
5580 catch_body += RethrowException(TokenPosition::kNoSource, try_handler_index);
5581 --catch_depth_;
5582 }
5583
5584 return body;
5585}
5586
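// Rough shape of the fragment built above for a handle-free
// `int32_t f(int32_t)` signature (a sketch, not verbatim IL output):
//
//   v0 <- LoadLocal(arg0)
//   v1 <- CheckNull(v0)                     // null check before the call
//   v2 <- UnboxTruncate(kUnboxedInt32, v1)  // FfiConvertPrimitiveToNative
//   v3 <- LoadLocal(address)
//   v4 <- FfiCall(v3, v2)
//   v5 <- Box(kUnboxedInt32, v4)            // FfiConvertPrimitiveToDart
//   Return(v5)
//
// The handle-scope enter/exit and the try/catch only appear when the
// signature contains handles.
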
5587Fragment FlowGraphBuilder::LoadNativeArg(
5588 const compiler::ffi::CallbackMarshaller& marshaller,
5589 intptr_t arg_index) {
5590 const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
5591 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5592
5593 Fragment fragment;
5594 for (intptr_t j = 0; j < num_defs; j++) {
5595 const intptr_t def_index = marshaller.DefinitionIndex(j, arg_index);
5596 auto* parameter = new (Z) NativeParameterInstr(marshaller, def_index);
5597 Push(parameter);
5598 fragment <<= parameter;
5599 LocalVariable* def = MakeTemporary();
5600 defs->Add(def);
5601 }
5602
5603 if (marshaller.IsCompoundCType(arg_index)) {
5604 fragment +=
5605 FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, defs);
5606 } else {
5607 fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);
5608 }
5609 return fragment;
5610}
5611
5612FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
5613 const Function& function) {
5614 const char* error = nullptr;
5615 const auto marshaller_ptr =
5616 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5617 // AbiSpecific integers can be incomplete, leaving the calling convention
5618 // unknown. However, this is caught in fromFunction in both JIT and AOT.
5619 RELEASE_ASSERT(error == nullptr);
5620 RELEASE_ASSERT(marshaller_ptr != nullptr);
5621 const auto& marshaller = *marshaller_ptr;
5622 const bool is_closure = function.GetFfiCallbackKind() ==
5623 FfiCallbackKind::kIsolateLocalClosureCallback;
5624
5625 graph_entry_ =
5626 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5627
5628 auto* const native_entry =
5629 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5630 CurrentTryIndex(), GetNextDeoptId());
5631
5632 graph_entry_->set_normal_entry(native_entry);
5633
5634 Fragment function_body(native_entry);
5635 function_body += CheckStackOverflowInPrologue(function.token_pos());
5636
5637 // Wrap the entire method in a big try/catch. This is important to ensure that
5638 // the VM does not crash if the callback throws an exception.
5639 const intptr_t try_handler_index = AllocateTryIndex();
5640 Fragment body = TryCatch(try_handler_index);
5641 ++try_depth_;
5642
5643 LocalVariable* closure = nullptr;
5644 if (is_closure) {
5645 // Load and unwrap closure persistent handle.
5646 body += LoadThread();
5647 body +=
5648 LoadUntagged(compiler::target::Thread::unboxed_runtime_arg_offset());
5649 body += LoadNativeField(Slot::PersistentHandle_ptr());
5650 closure = MakeTemporary();
5651 }
5652
5653 // Box and push the arguments.
5654 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5655 body += LoadNativeArg(marshaller, i);
5656 }
5657
5658 if (is_closure) {
5659 // Call the target. The +1 in the argument count is because the closure
5660 // itself is the first argument.
5661 const intptr_t argument_count = marshaller.num_args() + 1;
5662 body += LoadLocal(closure);
5663 if (!FLAG_precompiled_mode) {
5664 // The ClosureCallInstr() takes one explicit input (apart from arguments).
5665 // It uses it to find the target address (in AOT from
5666 // Closure::entry_point, in JIT from Closure::function_::entry_point).
5667 body += LoadNativeField(Slot::Closure_function());
5668 }
5669 body +=
5670 ClosureCall(Function::null_function(), TokenPosition::kNoSource,
5671 /*type_args_len=*/0, argument_count, Array::null_array());
5672 } else {
5673 // Call the target.
5674 //
5675 // TODO(36748): Determine the hot-reload semantics of callbacks and update
5676 // the rebind-rule accordingly.
5677 body += StaticCall(TokenPosition::kNoSource,
5678 Function::ZoneHandle(Z, function.FfiCallbackTarget()),
5679 marshaller.num_args(), Array::empty_array(),
5680 ICData::kNoRebind);
5681 }
5682
5683 if (!marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5684 body += CheckNullOptimized(
5685 String::ZoneHandle(Z, Symbols::New(H.thread(), "return_value")),
5686 CheckNullInstr::kArgumentError);
5687 }
5688
5689 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5690 body += FfiCallbackConvertCompoundReturnToNative(
5691 marshaller, compiler::ffi::kResultIndex);
5692 } else {
5693 body +=
5694 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5695 }
5696
5697 body += NativeReturn(marshaller);
5698
5699 --try_depth_;
5700 function_body += body;
5701
5702 ++catch_depth_;
5703 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5704 /*needs_stacktrace=*/false,
5705 /*is_synthesized=*/true);
5706
5707 // Return the "exceptional return" value given in 'fromFunction'.
5708 if (marshaller.IsVoid(compiler::ffi::kResultIndex)) {
5709 // The exceptional return is always null -- return nullptr instead.
5710 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5711 catch_body += UnboxedIntConstant(0, kUnboxedIntPtr);
5712 } else if (marshaller.IsPointerPointer(compiler::ffi::kResultIndex)) {
5713 // The exceptional return is always null -- return nullptr instead.
5714 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5715 catch_body += UnboxedIntConstant(0, kUnboxedAddress);
5716 catch_body += ConvertUnboxedToUntagged();
5717 } else if (marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5718 catch_body += UnhandledException();
5719 catch_body +=
5720 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5721 } else if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5722 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5723 // Manufacture empty result.
5724 const intptr_t size =
5725 Utils::RoundUp(marshaller.Location(compiler::ffi::kResultIndex)
5726 .payload_type()
5727 .SizeInBytes(),
5728 compiler::target::kWordSize);
5729 catch_body += IntConstant(size);
5730 catch_body +=
5731 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5732 catch_body += WrapTypedDataBaseInCompound(
5733 AbstractType::Handle(Z, marshaller.CType(compiler::ffi::kResultIndex)));
5734 catch_body += FfiCallbackConvertCompoundReturnToNative(
5735 marshaller, compiler::ffi::kResultIndex);
5736
5737 } else {
5738 catch_body += Constant(
5739 Instance::ZoneHandle(Z, function.FfiCallbackExceptionalReturn()));
5740 catch_body +=
5741 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5742 }
5743
5744 catch_body += NativeReturn(marshaller);
5745 --catch_depth_;
5746
5747 PrologueInfo prologue_info(-1, -1);
5748 return new (Z)
5749 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5750 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5751}
5752
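// Overall shape of the sync-callback graph built above (a sketch):
//
//   NativeEntry
//     CheckStackOverflow
//     try:
//       LoadNativeArg x N  ->  StaticCall / ClosureCall target
//       convert result     ->  NativeReturn
//     catch:
//       materialize the exceptional-return value  ->  NativeReturn
//
// so a throwing callback never unwinds into its native caller.
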
5753FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
5754 const Function& function) {
5755 const char* error = nullptr;
5756 const auto marshaller_ptr =
5757 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5758 // AbiSpecific integers can be incomplete, leaving the calling convention
5759 // unknown. However, this is caught in fromFunction in both JIT and AOT.
5760 RELEASE_ASSERT(error == nullptr);
5761 RELEASE_ASSERT(marshaller_ptr != nullptr);
5762 const auto& marshaller = *marshaller_ptr;
5763
5764 // Currently all async FFI callbacks return void. This is enforced by the
5765 // frontend.
5766 ASSERT(marshaller.IsVoid(compiler::ffi::kResultIndex));
5767
5768 graph_entry_ =
5769 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5770
5771 auto* const native_entry =
5772 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5773 CurrentTryIndex(), GetNextDeoptId());
5774
5775 graph_entry_->set_normal_entry(native_entry);
5776
5777 Fragment function_body(native_entry);
5778 function_body += CheckStackOverflowInPrologue(function.token_pos());
5779
5780 // Wrap the entire method in a big try/catch. This is important to ensure that
5781 // the VM does not crash if the callback throws an exception.
5782 const intptr_t try_handler_index = AllocateTryIndex();
5783 Fragment body = TryCatch(try_handler_index);
5784 ++try_depth_;
5785
5786 // Box and push the arguments into an array, to be sent to the target.
5787 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
5788 body += IntConstant(marshaller.num_args());
5789 body += CreateArray();
5790 LocalVariable* array = MakeTemporary();
5791 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5792 body += LoadLocal(array);
5793 body += IntConstant(i);
5794 body += LoadNativeArg(marshaller, i);
5795 body += StoreIndexed(kArrayCid);
5796 }
5797
5798 // Send the arg array to the target. The arg array is still on the stack.
5799 body += Call1ArgStub(TokenPosition::kNoSource,
5800 Call1ArgStubInstr::StubId::kFfiAsyncCallbackSend);
5801
5802 body += FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5803 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5804 body += NativeReturn(marshaller);
5805
5806 --try_depth_;
5807 function_body += body;
5808
5809 ++catch_depth_;
5810 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5811 /*needs_stacktrace=*/false,
5812 /*is_synthesized=*/true);
5813
5814 // This catch indicates there's been some sort of error, but async callbacks
5815 // are fire-and-forget, and we don't guarantee delivery.
5816 catch_body += NullConstant();
5817 catch_body +=
5818 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5819 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5820 catch_body += NativeReturn(marshaller);
5821 --catch_depth_;
5822
5823 PrologueInfo prologue_info(-1, -1);
5824 return new (Z)
5825 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5826 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5827}
5828
5829void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
5830 try_catch_block_ = try_catch_block;
5831 SetCurrentTryIndex(try_catch_block == nullptr ? kInvalidTryIndex
5832 : try_catch_block->try_index());
5833}
5834
5835const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
5836 if (prepend_type_arguments_.IsNull()) {
5837 const auto& dart_internal = Library::Handle(Z, Library::InternalLibrary());
5838 prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
5839 Symbols::PrependTypeArguments());
5840 ASSERT(!prepend_type_arguments_.IsNull());
5841 }
5842 return prepend_type_arguments_;
5843}
5844
5845Fragment FlowGraphBuilder::BuildIntegerHashCode(bool smi) {
5846 Fragment body;
5847 Value* unboxed_value = Pop();
5848 HashIntegerOpInstr* hash =
5849 new HashIntegerOpInstr(unboxed_value, smi, DeoptId::kNone);
5850 Push(hash);
5851 body <<= hash;
5852 return body;
5853}
5854
5855Fragment FlowGraphBuilder::BuildDoubleHashCode() {
5856 Fragment body;
5857 Value* double_value = Pop();
5858 HashDoubleOpInstr* hash = new HashDoubleOpInstr(double_value, DeoptId::kNone);
5859 Push(hash);
5860 body <<= hash;
5861 body += Box(kUnboxedInt64);
5862 return body;
5863}
5864
5865SwitchHelper::SwitchHelper(Zone* zone,
5866 TokenPosition position,
5867 bool is_exhaustive,
5868 const AbstractType& expression_type,
5869 SwitchBlock* switch_block,
5870 intptr_t case_count)
5871 : zone_(zone),
5872 position_(position),
5873 is_exhaustive_(is_exhaustive),
5874 expression_type_(expression_type),
5875 switch_block_(switch_block),
5876 case_count_(case_count),
5877 case_bodies_(case_count),
5878 case_expression_counts_(case_count),
5879 expressions_(case_count),
5880 sorted_expressions_(case_count) {
5881 case_expression_counts_.FillWith(0, 0, case_count);
5882
5883 if (expression_type.nullability() == Nullability::kNonNullable) {
5884 if (expression_type.IsIntType() || expression_type.IsSmiType()) {
5885 is_optimizable_ = true;
5886 } else if (expression_type.HasTypeClass() &&
5887 Class::Handle(zone_, expression_type.type_class())
5888 .is_enum_class()) {
5889 is_optimizable_ = true;
5890 is_enum_switch_ = true;
5891 }
5892 }
5893}
5894
5895int64_t SwitchHelper::ExpressionRange() const {
5896 const int64_t min = expression_min().AsInt64Value();
5897 const int64_t max = expression_max().AsInt64Value();
5898 ASSERT(min <= max);
5899 const uint64_t diff = static_cast<uint64_t>(max) - static_cast<uint64_t>(min);
5900 // Saturate to avoid overflow.
5901 if (diff > static_cast<uint64_t>(kMaxInt64 - 1)) {
5902 return kMaxInt64;
5903 }
5904 return static_cast<int64_t>(diff + 1);
5905}
5906
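// Example: case expressions {-2, 0, 5} give expression_min() == -2 and
// expression_max() == 5, so ExpressionRange() returns 5 - (-2) + 1 == 8.
// The unsigned subtraction plus the saturation branch keep extreme int64
// bounds from overflowing.
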
5907bool SwitchHelper::RequiresLowerBoundCheck() const {
5908 if (is_enum_switch()) {
5909 if (expression_min().IsZero()) {
5910 // Enum indexes are always non-negative.
5911 return false;
5912 }
5913 }
5914 return true;
5915}
5916
5917bool SwitchHelper::RequiresUpperBoundCheck() const {
5918 if (is_enum_switch()) {
5919 return has_default() || !is_exhaustive();
5920 }
5921 return true;
5922}
5923
5924SwitchDispatch SwitchHelper::SelectDispatchStrategy() {
5925 // For small to medium-sized switches, binary search is faster than a
5926 // jump table.
5927 // Please update runtime/tests/vm/dart/optimized_switch_test.dart
5928 // when changing this constant.
5929 const intptr_t kJumpTableMinExpressions = 16;
5930 // This limit comes from IndirectGotoInstr.
5931 // Realistically, the current limit should never be hit by any code.
5932 const intptr_t kJumpTableMaxSize = kMaxInt32;
5933 // Sometimes the switch expressions don't cover a contiguous range.
5934 // If the ratio of holes to expressions is too great we fall back to a
5935 // binary search to avoid code size explosion.
5936 const double kJumpTableMaxHolesRatio = 1.0;
5937
5938 if (!is_optimizable() || expressions().is_empty()) {
5939 // The switch is not optimizable, so we can only use linear scan.
5940 return kSwitchDispatchLinearScan;
5941 }
5942
5943 if (!CompilerState::Current().is_aot()) {
5944 // JIT mode supports hot-reload, which currently prevents us from
5945 // enabling optimized switches.
5946 return kSwitchDispatchLinearScan;
5947 }
5948
5949 if (FLAG_force_switch_dispatch_type == kSwitchDispatchLinearScan) {
5950 return kSwitchDispatchLinearScan;
5951 }
5952
5953 PrepareForOptimizedSwitch();
5954
5955 if (!is_optimizable()) {
5956 // While preparing for an optimized switch we might have discovered that
5957 // the switch is not optimizable after all.
5958 return kSwitchDispatchLinearScan;
5959 }
5960
5961 if (FLAG_force_switch_dispatch_type == kSwitchDispatchBinarySearch) {
5962 return kSwitchDispatchBinarySearch;
5963 }
5964
5965 const int64_t range = ExpressionRange();
5966 if (range > kJumpTableMaxSize) {
5967 return kSwitchDispatchBinarySearch;
5968 }
5969
5970 const intptr_t num_expressions = expressions().length();
5971 ASSERT(num_expressions <= range);
5972
5973 const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
5974 const int64_t holes = range - num_expressions;
5975
5976 if (FLAG_force_switch_dispatch_type != kSwitchDispatchJumpTable) {
5977 if (num_expressions < kJumpTableMinExpressions) {
5978 return kSwitchDispatchBinarySearch;
5979 }
5980
5981 if (holes > max_holes) {
5982 return kSwitchDispatchBinarySearch;
5983 }
5984 }
5985
5986 // After this point we will use a jump table.
5987
5988 // In the general case, bounds checks are required before a jump table
5989 // to handle all possible integer values.
5990 // For enums, the set of possible index values is known and much smaller
5991 // than the set of all possible integer values. A jump table that covers
5992 // either or both bounds of the range of index values requires only one or
5993 // no bounds checks.
5994 // If the expressions of an enum switch don't cover the full range of
5995 // values we can try to extend the jump table to cover the full range, but
5996 // not beyond kJumpTableMaxHolesRatio.
5997 // The count of enum values is not available when the flow graph is
5998 // constructed. The lower bound is always 0 so eliminating the lower
5999 // bound check is still possible by extending expression_min to 0.
6000 //
6001 // In the case of an integer switch we try to extend expression_min to 0
6002 // for a different reason.
6003 // If the range starts at zero it directly maps to the jump table
6004 // and we don't need to adjust the switch variable before the
6005 // jump table.
6006 if (expression_min().AsInt64Value() > 0) {
6007 const intptr_t holes_budget = Utils::Minimum(
6008 // Holes still available.
6009 max_holes - holes,
6010 // Entries left in the jump table.
6011 kJumpTableMaxSize - range);
6012
6013 const int64_t required_holes = expression_min().AsInt64Value();
6014 if (required_holes <= holes_budget) {
6015 expression_min_ = &Object::smi_zero();
6016 }
6017 }
6018
6019 return kSwitchDispatchJumpTable;
6020}
6021
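// Worked example of the selection above, assuming AOT and no forcing flag:
// 20 case expressions covering values in 0..30 give range == 31,
// holes == 31 - 20 == 11, and max_holes == 20. Since num_expressions >= 16
// (kJumpTableMinExpressions) and holes <= max_holes, the switch compiles to
// a jump table; with only 10 expressions it would fall back to binary search.
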
6022void SwitchHelper::PrepareForOptimizedSwitch() {
6023 // Find the min and max of integer representations of expressions.
6024 // We also populate SwitchExpression::integer for later use.
6025 const Field* enum_index_field = nullptr;
6026 for (intptr_t i = 0; i < expressions_.length(); ++i) {
6027 SwitchExpression& expression = expressions_[i];
6028 sorted_expressions_.Add(&expression);
6029
6030 const Instance& value = expression.value();
6031 const Integer* integer = nullptr;
6032 if (is_enum_switch()) {
6033 if (enum_index_field == nullptr) {
6034 enum_index_field =
6035 &Field::Handle(zone_, IG->object_store()->enum_index_field());
6036 }
6037 integer = &Integer::ZoneHandle(
6038 zone_, Integer::RawCast(value.GetField(*enum_index_field)));
6039 } else {
6040 integer = &Integer::Cast(value);
6041 }
6042 expression.set_integer(*integer);
6043 if (i == 0) {
6044 expression_min_ = integer;
6045 expression_max_ = integer;
6046 } else {
6047 if (expression_min_->CompareWith(*integer) > 0) {
6048 expression_min_ = integer;
6049 }
6050 if (expression_max_->CompareWith(*integer) < 0) {
6051 expression_max_ = integer;
6052 }
6053 }
6054 }
6055
6056 // Sort expressions by their integer value.
6057 sorted_expressions_.Sort(
6058 [](SwitchExpression* const* a, SwitchExpression* const* b) {
6059 return (*a)->integer().CompareWith((*b)->integer());
6060 });
6061
6062 // Check that there are no duplicate case expressions.
6063 // Duplicate expressions are allowed in switch statements, but
6064 // optimized switches don't implement them.
6065 for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
6066 const SwitchExpression& a = *sorted_expressions_.At(i);
6067 const SwitchExpression& b = *sorted_expressions_.At(i + 1);
6068 if (a.integer().Equals(b.integer())) {
6069 is_optimizable_ = false;
6070 break;
6071 }
6072 }
6073}
6074
6075void SwitchHelper::AddExpression(intptr_t case_index,
6076 TokenPosition position,
6077 const Instance& value) {
6078 case_expression_counts_[case_index]++;
6079
6080 expressions_.Add(SwitchExpression(case_index, position, value));
6081
6082 if (is_optimizable_) {
6083 // Check the type of the case expression for use in an optimized switch.
6084 if (!value.IsInstanceOf(expression_type_, Object::null_type_arguments(),
6085 Object::null_type_arguments())) {
6086 is_optimizable_ = false;
6087 }
6088 }
6089}
6090
6091} // namespace kernel
6092
6093} // namespace dart
◆ STORE_NATIVE_FIELD

#define STORE_NATIVE_FIELD (   V)
Value:
V(Finalizer_setCallback, Finalizer_callback) \
V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
V(FinalizerBase_setDetachments, FinalizerBase_detachments) \
V(FinalizerEntry_setToken, FinalizerEntry_token) \
V(NativeFinalizer_setCallback, NativeFinalizer_callback) \
V(ReceivePort_setHandler, ReceivePort_handler) \
V(LinkedHashBase_setData, LinkedHashBase_data) \
V(LinkedHashBase_setIndex, LinkedHashBase_index) \
V(SuspendState_setFunctionData, SuspendState_function_data) \
V(SuspendState_setThenCallback, SuspendState_then_callback) \
V(SuspendState_setErrorCallback, SuspendState_error_callback) \
V(WeakProperty_setKey, WeakProperty_key) \
V(WeakProperty_setValue, WeakProperty_value) \
V(WeakReference_setTarget, WeakReference_target)

Definition at line 937 of file kernel_to_il.cc.

◆ STORE_NATIVE_FIELD_NO_BARRIER

#define STORE_NATIVE_FIELD_NO_BARRIER (   V)
Value:
V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys) \
V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask) \
V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)

Definition at line 953 of file kernel_to_il.cc.

◆ T

#define T   (type_translator_)

Definition at line 60 of file kernel_to_il.cc.

◆ TYPED_DATA_GET_INDEXED_CASES [1/2]

#define TYPED_DATA_GET_INDEXED_CASES (   clazz)
Value:
case MethodRecognizer::k##clazz##ArrayGetIndexed: \
case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \

◆ TYPED_DATA_GET_INDEXED_CASES [2/2]

#define TYPED_DATA_GET_INDEXED_CASES (   clazz)
Value:
case MethodRecognizer::k##clazz##ArrayGetIndexed: \
case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \

◆ Z

#define Z   (zone_)

Definition at line 58 of file kernel_to_il.cc.