#include "vm/globals.h"  // Needed here to get TARGET_ARCH_IA32.
#if defined(TARGET_ARCH_IA32)

#include "vm/code_patcher.h"
#include "vm/cpu.h"
#include "vm/instructions.h"
#include "vm/object.h"

namespace dart {
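// The expected pattern of a Dart unoptimized call (static and instance):
//  mov ECX, ic-data
//  mov EDI, target-code-object
//  call target_address (stub)
//  <- return address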
class UnoptimizedCall : public ValueObject {
 public:
  UnoptimizedCall(uword return_address, const Code& code)
      : code_(code), start_(return_address - kPatternSize) {
    ASSERT(IsValid());
  }
  ObjectPtr ic_data() const {
    return LoadUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1));
  }
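  // Instruction sizes on IA32: `mov reg32, imm32` is 5 bytes (one opcode
  // byte plus a 32-bit immediate) and the 0xFF-encoded indirect call through
  // EDI is 3 bytes, so two movs plus the call span 13 bytes in total.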
  static constexpr int kMovInstructionSize = 5;
  static constexpr int kCallInstructionSize = 3;
  static constexpr int kPatternSize =
      2 * kMovInstructionSize + kCallInstructionSize;
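  // A valid call site starts with 0xB9 (`mov ECX, imm32`) and has the
  // opcode of the indirect call (0xFF) right after the two 5-byte movs.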
  bool IsValid() const {
    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
    return (code_bytes[0] == 0xB9) &&
           (code_bytes[2 * kMovInstructionSize] == 0xFF);
  }
  uword return_address() const { return start_ + kPatternSize; }
  uword call_address() const { return start_ + 2 * kMovInstructionSize; }

 protected:
  const Code& code_;
  uword start_;
};
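// A native call uses the same pattern; its first 32-bit immediate holds the
// address of the C function to invoke.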
class NativeCall : public UnoptimizedCall {
 public:
  NativeCall(uword return_address, const Code& code)
      : UnoptimizedCall(return_address, code) {}
  NativeFunction native_function() const {
    return LoadUnaligned(reinterpret_cast<NativeFunction*>(start_ + 1));
  }

  void set_native_function(NativeFunction func) const {
    WritableInstructionsScope writable(start_ + 1, sizeof(func));
    StoreUnaligned(reinterpret_cast<NativeFunction*>(start_ + 1), func);
  }
};
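// An instance call site; its first immediate holds the call data (an ICData,
// a MegamorphicCache, or an Array), its second the target code object.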
class InstanceCall : public UnoptimizedCall {
 public:
  InstanceCall(uword return_address, const Code& code)
      : UnoptimizedCall(return_address, code) {
#if defined(DEBUG)
    Object& test_data = Object::Handle(data());
    ASSERT(test_data.IsArray() || test_data.IsICData() ||
           test_data.IsMegamorphicCache());
    if (test_data.IsICData()) {
      ASSERT(ICData::Cast(test_data).NumArgsTested() > 0);
    }
#endif  // DEBUG
  }
  ObjectPtr data() const {
    return LoadUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1));
  }
  void set_data(const Object& data) const {
    // The pointer is embedded in the Instructions payload but visited
    // through the Code object.
    code_.StorePointerUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1),
                                data.ptr(), Thread::Current());
  }
  CodePtr target() const {
    return LoadUnaligned(reinterpret_cast<CodePtr*>(start_ + 6));
  }
  void set_target(const Code& target) const {
    code_.StorePointerUnaligned(reinterpret_cast<CodePtr*>(start_ + 6),
                                target.ptr(), Thread::Current());
  }
};
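// An unoptimized static call site; its first immediate holds the ICData
// that records the call's observed targets.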
class UnoptimizedStaticCall : public UnoptimizedCall {
 public:
  UnoptimizedStaticCall(uword return_address, const Code& code)
      : UnoptimizedCall(return_address, code) {
#if defined(DEBUG)
    ICData& test_ic_data = ICData::Handle();
    test_ic_data ^= ic_data();
    ASSERT(test_ic_data.NumArgsTested() >= 0);
#endif  // DEBUG
  }
};
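// The expected pattern of a Dart static call:
//  mov EDI, target-code-object
//  call entry_point (stub)
//  <- return address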
class StaticCall : public ValueObject {
 public:
  StaticCall(uword return_address, const Code& code)
      : code_(code),
        start_(return_address - (kMovInstructionSize + kCallInstructionSize)) {
    ASSERT(IsValid());
  }
  bool IsValid() const {
    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
    return (code_bytes[0] == 0xBF) && (code_bytes[5] == 0xFF);
  }
  CodePtr target() const {
    return LoadUnaligned(reinterpret_cast<CodePtr*>(start_ + 1));
  }
  void set_target(const Code& target) const {
    // The pointer is embedded in the Instructions payload but visited
    // through the Code object.
    code_.StorePointerUnaligned(reinterpret_cast<CodePtr*>(start_ + 1),
                                target.ptr(), Thread::Current());
  }
  static constexpr int kMovInstructionSize = 5;
  static constexpr int kCallInstructionSize = 3;
  uword return_address() const {
    return start_ + kMovInstructionSize + kCallInstructionSize;
  }
  uword call_address() const { return start_ + kMovInstructionSize; }

 private:
  const Code& code_;
  uword start_;
};
CodePtr CodePatcher::GetStaticCallTargetAt(uword return_address,
                                           const Code& code) {
  ASSERT(code.ContainsInstructionAt(return_address));
  StaticCall call(return_address, code);
  return call.target();
}
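// Patching happens with all other mutators stopped and the instructions
// temporarily writable, so no thread can execute a half-updated call site.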
void CodePatcher::PatchStaticCallAt(uword return_address,
                                    const Code& code,
                                    const Code& new_target) {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const Instructions& instrs = Instructions::Handle(zone, code.instructions());
  thread->isolate_group()->RunWithStoppedMutators([&]() {
    WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
    ASSERT(code.ContainsInstructionAt(return_address));
    StaticCall call(return_address, code);
    call.set_target(new_target);
  });
}
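// Reads the data and target embedded at an instance call site; `data` is
// optional and is only written when the caller passes a non-null pointer.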
CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
                                       const Code& caller_code,
                                       Object* data) {
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  InstanceCall call(return_address, caller_code);
  if (data != nullptr) {
    *data = call.data();
  }
  return call.target();
}
void CodePatcher::PatchInstanceCallAt(uword return_address,
                                      const Code& caller_code,
                                      const Object& data,
                                      const Code& target) {
  auto thread = Thread::Current();
  thread->isolate_group()->RunWithStoppedMutators([&]() {
    PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
                                           data, target);
  });
}
void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
    Thread* thread,
    uword return_address,
    const Code& caller_code,
    const Object& data,
    const Code& target) {
  auto zone = thread->zone();
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  const Instructions& instrs =
      Instructions::Handle(zone, caller_code.instructions());
  WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
  InstanceCall call(return_address, caller_code);
  call.set_data(data);
  call.set_target(target);
}
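// An unoptimized static call site holds an ICData whose entry 0 records the
// resolved static target; `^=` assigns the handle while asserting its type.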
FunctionPtr CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
                                                    const Code& caller_code,
                                                    ICData* ic_data_result) {
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  UnoptimizedStaticCall static_call(return_address, caller_code);
  ICData& ic_data = ICData::Handle();
  ic_data ^= static_call.ic_data();
  if (ic_data_result != nullptr) {
    *ic_data_result = ic_data.ptr();
  }
  return ic_data.GetTargetAt(0);
}
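// Switchable instance calls are only generated by the precompiler, which
// does not target IA32, so the patcher stubs them out below.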
void CodePatcher::PatchSwitchableCallAt(uword return_address,
                                        const Code& caller_code,
                                        const Object& data,
                                        const Code& target) {
  UNREACHABLE();
}

void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
    Thread* thread,
    uword return_address,
    const Code& caller_code,
    const Object& data,
    const Code& target) {
  UNREACHABLE();
}

uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
                                                  const Code& caller_code) {
  UNREACHABLE();
  return 0;
}

ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
                                               const Code& caller_code) {
  UNREACHABLE();
  return Object::null();
}

void CodePatcher::PatchNativeCallAt(uword return_address,
                                    const Code& caller_code,
                                    NativeFunction target,
                                    const Code& trampoline) {
  UNREACHABLE();
}

CodePtr CodePatcher::GetNativeCallAt(uword return_address,
                                     const Code& caller_code,
                                     NativeFunction* target) {
  UNREACHABLE();
  return nullptr;
}

}  // namespace dart

#endif  // defined(TARGET_ARCH_IA32)