Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
asm_intrinsifier_ia32.cc
Go to the documentation of this file.
1// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4//
5// The intrinsic code below is executed before a method has built its frame.
6// The return address is on the stack and the arguments below it.
7// Registers EDX (arguments descriptor) and ECX (function) must be preserved.
8// Each intrinsification method returns true if the corresponding
9// Dart method was intrinsified.
10
11#include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32.
12#if defined(TARGET_ARCH_IA32)
13
14#define SHOULD_NOT_INCLUDE_RUNTIME
15
16#include "vm/class_id.h"
19
20namespace dart {
21namespace compiler {
22
23// When entering intrinsics code:
24// ECX: IC Data
25// EDX: Arguments descriptor
26// TOS: Return address
27// The ECX, EDX registers can be destroyed only if there is no slow-path, i.e.
28// if the intrinsified method always executes a return.
29// The EBP register should not be modified, because it is used by the profiler.
30// The THR register (see constants_ia32.h) must be preserved.
31
32#define __ assembler->
33
34// Tests if two top most arguments are smis, jumps to label not_smi if not.
35// Topmost argument is in EAX.
static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Right (top) argument.
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));  // Left argument.
  // OR the two values: if either has its Smi tag bit set, the result does too.
  __ orl(EBX, EAX);
  __ testl(EBX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, not_smi, Assembler::kNearJump);  // At least one is not a Smi.
}
43
// Intrinsic for Smi shift-left. TOS is the shift amount, below it the value
// to shift. Produces a Smi when the result fits, otherwise allocates a Mint;
// falls through to normal_ir_body for non-Smi arguments, oversized shift
// counts, and negative values that overflow.
void AsmIntrinsifier::Integer_shl(Assembler* assembler, Label* normal_ir_body) {
  ASSERT(kSmiTagShift == 1);
  ASSERT(kSmiTag == 0);
  Label overflow;
  TestBothArgumentsSmis(assembler, normal_ir_body);
  // Shift value is in EAX. Compare with tagged Smi.
  __ cmpl(EAX, Immediate(target::ToRawSmi(target::kSmiBits)));
  // Unsigned compare also routes negative shift counts to the slow path.
  __ j(ABOVE_EQUAL, normal_ir_body, Assembler::kNearJump);

  __ SmiUntag(EAX);
  __ movl(ECX, EAX);  // Shift amount must be in ECX.
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // Value.

  // Overflow test - all the shifted-out bits must be same as the sign bit.
  __ movl(EBX, EAX);
  __ shll(EAX, ECX);
  __ sarl(EAX, ECX);  // Shift back; equals the original iff no bits were lost.
  __ cmpl(EAX, EBX);
  __ j(NOT_EQUAL, &overflow, Assembler::kNearJump);

  __ shll(EAX, ECX);  // Shift for result now we know there is no overflow.

  // EAX is a correctly tagged Smi.
  __ ret();

  __ Bind(&overflow);
  // Arguments are Smi but the shift produced an overflow to Mint.
  __ cmpl(EBX, Immediate(0));
  // TODO(srdjan): Implement negative values, for now fall through.
  __ j(LESS, normal_ir_body, Assembler::kNearJump);
  __ SmiUntag(EBX);
  __ movl(EAX, EBX);
  __ shll(EBX, ECX);  // Low 32 bits of the 64-bit result.
  __ xorl(EDI, EDI);
  __ shldl(EDI, EAX, ECX);  // High 32 bits: bits shifted out of the low word.
  // Result in EDI (high) and EBX (low).
  const Class& mint_class = MintClass();
  // Allocation failure falls through to the compiled method body.
  __ TryAllocate(mint_class, normal_ir_body, Assembler::kNearJump,
                 EAX,  // Result register.
                 ECX);  // temp
  // EBX and EDI are not objects but integer values.
  __ movl(FieldAddress(EAX, target::Mint::value_offset()), EBX);
  __ movl(FieldAddress(EAX, target::Mint::value_offset() + target::kWordSize),
          EDI);
  __ ret();
  __ Bind(normal_ir_body);
}
91
// Pushes reg onto the stack as a 64-bit value (high word first, then low
// word). A Smi is untagged and sign-extended; a Mint has its two 32-bit words
// pushed directly. Jumps to not_smi_or_mint for any other class.
// Clobbers reg and tmp.
static void Push64SmiOrMint(Assembler* assembler,
                            Register reg,
                            Register tmp,
                            Label* not_smi_or_mint) {
  Label not_smi, done;
  __ testl(reg, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
  __ SmiUntag(reg);
  // Sign extend to 64 bit
  __ movl(tmp, reg);
  __ sarl(tmp, Immediate(31));  // tmp = 0 or -1, copies of reg's sign bit.
  __ pushl(tmp);  // High word.
  __ pushl(reg);  // Low word.
  __ jmp(&done);
  __ Bind(&not_smi);
  __ CompareClassId(reg, kMintCid, tmp);
  __ j(NOT_EQUAL, not_smi_or_mint);
  // Mint.
  __ pushl(FieldAddress(reg, target::Mint::value_offset() + target::kWordSize));
  __ pushl(FieldAddress(reg, target::Mint::value_offset()));
  __ Bind(&done);
}
114
// Shared body for the relational integer intrinsics. true_condition is the
// condition under which "left <op> right" is true. Fast path compares two
// Smis directly; otherwise both operands are pushed as 64-bit values and
// compared word-wise, falling through to normal_ir_body when an operand is
// neither Smi nor Mint.
static void CompareIntegers(Assembler* assembler,
                            Label* normal_ir_body,
                            Condition true_condition) {
  Label try_mint_smi, is_true, is_false, drop_two_fall_through, fall_through;
  TestBothArgumentsSmis(assembler, &try_mint_smi);
  // EAX contains the right argument.
  __ cmpl(Address(ESP, +2 * target::kWordSize), EAX);  // Left vs. right.
  __ j(true_condition, &is_true, Assembler::kNearJump);
  __ Bind(&is_false);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();

  // 64-bit comparison
  // Derive the conditions for comparing the high words (signed) and, when
  // the high words are equal, the low words (unsigned).
  Condition hi_true_cond, hi_false_cond, lo_false_cond;
  switch (true_condition) {
    case LESS:
    case LESS_EQUAL:
      hi_true_cond = LESS;
      hi_false_cond = GREATER;
      lo_false_cond = (true_condition == LESS) ? ABOVE_EQUAL : ABOVE;
      break;
    case GREATER:
    case GREATER_EQUAL:
      hi_true_cond = GREATER;
      hi_false_cond = LESS;
      lo_false_cond = (true_condition == GREATER) ? BELOW_EQUAL : BELOW;
      break;
    default:
      UNREACHABLE();
      hi_true_cond = hi_false_cond = lo_false_cond = OVERFLOW;
  }
  __ Bind(&try_mint_smi);
  // Note that EDX and ECX must be preserved in case we fall through to main
  // method.
  // EAX contains the right argument.
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));  // Left argument.
  // Push left as 64 bit integer.
  Push64SmiOrMint(assembler, EBX, EDI, normal_ir_body);
  // Push right as 64 bit integer.
  Push64SmiOrMint(assembler, EAX, EDI, &drop_two_fall_through);
  __ popl(EBX);  // Right.LO.
  __ popl(ECX);  // Right.HI.
  __ popl(EAX);  // Left.LO.
  __ popl(EDX);  // Left.HI.
  __ cmpl(EDX, ECX);  // cmpl left.HI, right.HI.
  __ j(hi_false_cond, &is_false, Assembler::kNearJump);
  __ j(hi_true_cond, &is_true, Assembler::kNearJump);
  __ cmpl(EAX, EBX);  // cmpl left.LO, right.LO.
  __ j(lo_false_cond, &is_false, Assembler::kNearJump);
  // Else is true.
  __ jmp(&is_true);

  // Left was already pushed as 64-bit but right was neither Smi nor Mint:
  // drop the two left words before falling through to the compiled body.
  __ Bind(&drop_two_fall_through);
  __ Drop(2);
  __ Bind(normal_ir_body);
}
174
175void AsmIntrinsifier::Integer_lessThan(Assembler* assembler,
176 Label* normal_ir_body) {
177 CompareIntegers(assembler, normal_ir_body, LESS);
178}
179
180void AsmIntrinsifier::Integer_greaterThan(Assembler* assembler,
181 Label* normal_ir_body) {
182 CompareIntegers(assembler, normal_ir_body, GREATER);
183}
184
185void AsmIntrinsifier::Integer_lessEqualThan(Assembler* assembler,
186 Label* normal_ir_body) {
187 CompareIntegers(assembler, normal_ir_body, LESS_EQUAL);
188}
189
190void AsmIntrinsifier::Integer_greaterEqualThan(Assembler* assembler,
191 Label* normal_ir_body) {
192 CompareIntegers(assembler, normal_ir_body, GREATER_EQUAL);
193}
194
195// This is called for Smi and Mint receivers. The right argument
196// can be Smi, Mint or double.
void AsmIntrinsifier::Integer_equalToInteger(Assembler* assembler,
                                             Label* normal_ir_body) {
  Label true_label, check_for_mint;
  // For integer receiver '===' check first.
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Right argument.
  __ cmpl(EAX, Address(ESP, +2 * target::kWordSize));  // Compare with receiver.
  __ j(EQUAL, &true_label, Assembler::kNearJump);
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));
  __ orl(EAX, EBX);  // Combined tag check of both values.
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &check_for_mint, Assembler::kNearJump);
  // Both arguments are smi, '===' is good enough.
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&true_label);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();

  // At least one of the arguments was not Smi.
  Label receiver_not_smi;
  __ Bind(&check_for_mint);
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // Receiver.
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &receiver_not_smi);

  // Left (receiver) is Smi, return false if right is not Double.
  // Note that an instance of Mint never contains a value that can be
  // represented by Smi.
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Right argument.
  __ CompareClassId(EAX, kDoubleCid, EDI);
  __ j(EQUAL, normal_ir_body);  // Smi == Double: let the compiled body decide.
  __ LoadObject(EAX,
                CastHandle<Object>(FalseObject()));  // Smi == Mint -> false.
  __ ret();

  __ Bind(&receiver_not_smi);
  // EAX:: receiver.
  __ CompareClassId(EAX, kMintCid, EDI);
  __ j(NOT_EQUAL, normal_ir_body);  // Receiver is neither Smi nor Mint.
  // Receiver is Mint, return false if right is Smi.
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Right argument.
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, normal_ir_body);  // Right is not a Smi: slow path.
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  // TODO(srdjan): Implement Mint == Mint comparison.

  __ Bind(normal_ir_body);
}
246
247void AsmIntrinsifier::Integer_equal(Assembler* assembler,
248 Label* normal_ir_body) {
249 Integer_equalToInteger(assembler, normal_ir_body);
250}
251
252// Argument is Smi (receiver).
void AsmIntrinsifier::Smi_bitLength(Assembler* assembler,
                                    Label* normal_ir_body) {
  ASSERT(kSmiTagShift == 1);
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Receiver.
  // XOR with sign bit to complement bits if value is negative.
  __ movl(ECX, EAX);
  __ sarl(ECX, Immediate(31));  // All 0 or all 1.
  __ xorl(EAX, ECX);
  // BSR does not write the destination register if source is zero. Put a 1 in
  // the Smi tag bit to ensure BSR writes to destination register.
  __ orl(EAX, Immediate(kSmiTagMask));
  // BSR gives the index of the highest set bit; on the still-tagged value
  // (shifted left by one, see the ASSERT above) that index is the untagged
  // bit length.
  __ bsrl(EAX, EAX);
  __ SmiTag(EAX);
  __ ret();
}
268
269void AsmIntrinsifier::Bigint_lsh(Assembler* assembler, Label* normal_ir_body) {
270 // static void _lsh(Uint32List x_digits, int x_used, int n,
271 // Uint32List r_digits)
272
273 // Preserve THR to free ESI.
274 __ pushl(THR);
275 ASSERT(THR == ESI);
276
277 __ movl(EDI, Address(ESP, 5 * target::kWordSize)); // x_digits
278 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // n is Smi
279 __ SmiUntag(ECX);
280 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
281 __ movl(ESI, ECX);
282 __ sarl(ESI, Immediate(5)); // ESI = n ~/ _DIGIT_BITS.
283 __ leal(EBX,
284 FieldAddress(EBX, ESI, TIMES_4, target::TypedData::payload_offset()));
285 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // x_used > 0, Smi.
286 __ SmiUntag(ESI);
287 __ decl(ESI);
288 __ xorl(EAX, EAX); // EAX = 0.
289 __ movl(EDX,
290 FieldAddress(EDI, ESI, TIMES_4, target::TypedData::payload_offset()));
291 __ shldl(EAX, EDX, ECX);
292 __ movl(Address(EBX, ESI, TIMES_4, kBytesPerBigIntDigit), EAX);
293 Label last;
294 __ cmpl(ESI, Immediate(0));
295 __ j(EQUAL, &last, Assembler::kNearJump);
296 Label loop;
297 __ Bind(&loop);
298 __ movl(EAX, EDX);
299 __ movl(EDX, FieldAddress(
300 EDI, ESI, TIMES_4,
301 target::TypedData::payload_offset() - kBytesPerBigIntDigit));
302 __ shldl(EAX, EDX, ECX);
303 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX);
304 __ decl(ESI);
306 __ Bind(&last);
307 __ shldl(EDX, ESI, ECX); // ESI == 0.
308 __ movl(Address(EBX, 0), EDX);
309
310 // Restore THR and return.
311 __ popl(THR);
312 __ LoadObject(EAX, NullObject());
313 __ ret();
314}
315
316void AsmIntrinsifier::Bigint_rsh(Assembler* assembler, Label* normal_ir_body) {
317 // static void _rsh(Uint32List x_digits, int x_used, int n,
318 // Uint32List r_digits)
319
320 // Preserve THR to free ESI.
321 __ pushl(THR);
322 ASSERT(THR == ESI);
323
324 __ movl(EDI, Address(ESP, 5 * target::kWordSize)); // x_digits
325 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // n is Smi
326 __ SmiUntag(ECX);
327 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
328 __ movl(EDX, ECX);
329 __ sarl(EDX, Immediate(5)); // EDX = n ~/ _DIGIT_BITS.
330 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // x_used > 0, Smi.
331 __ SmiUntag(ESI);
332 __ decl(ESI);
333 // EDI = &x_digits[x_used - 1].
334 __ leal(EDI,
335 FieldAddress(EDI, ESI, TIMES_4, target::TypedData::payload_offset()));
336 __ subl(ESI, EDX);
337 // EBX = &r_digits[x_used - 1 - (n ~/ 32)].
338 __ leal(EBX,
339 FieldAddress(EBX, ESI, TIMES_4, target::TypedData::payload_offset()));
340 __ negl(ESI);
341 __ movl(EDX, Address(EDI, ESI, TIMES_4, 0));
342 Label last;
343 __ cmpl(ESI, Immediate(0));
344 __ j(EQUAL, &last, Assembler::kNearJump);
345 Label loop;
346 __ Bind(&loop);
347 __ movl(EAX, EDX);
348 __ movl(EDX, Address(EDI, ESI, TIMES_4, kBytesPerBigIntDigit));
349 __ shrdl(EAX, EDX, ECX);
350 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX);
351 __ incl(ESI);
353 __ Bind(&last);
354 __ shrdl(EDX, ESI, ECX); // ESI == 0.
355 __ movl(Address(EBX, 0), EDX);
356
357 // Restore THR and return.
358 __ popl(THR);
359 __ LoadObject(EAX, NullObject());
360 __ ret();
361}
362
void AsmIntrinsifier::Bigint_absAdd(Assembler* assembler,
                                    Label* normal_ir_body) {
  // static void _absAdd(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)
  // Digit-wise magnitude addition with carry propagation; assumes
  // a_used <= used (implied by the precomputed loop counts) — TODO confirm.

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  // Stack offsets below account for the pushed THR.
  __ movl(EDI, Address(ESP, 6 * target::kWordSize));  // digits
  __ movl(EAX, Address(ESP, 5 * target::kWordSize));  // used is Smi
  __ SmiUntag(EAX);  // used > 0.
  __ movl(ESI, Address(ESP, 4 * target::kWordSize));  // a_digits
  __ movl(ECX, Address(ESP, 3 * target::kWordSize));  // a_used is Smi
  __ SmiUntag(ECX);  // a_used > 0.
  __ movl(EBX, Address(ESP, 2 * target::kWordSize));  // r_digits

  // Precompute 'used - a_used' now so that carry flag is not lost later.
  __ subl(EAX, ECX);
  __ incl(EAX);  // To account for the extra test between loops.
  __ pushl(EAX);

  __ xorl(EDX, EDX);  // EDX = 0, carry flag = 0.
  Label add_loop;
  __ Bind(&add_loop);
  // Loop a_used times, ECX = a_used, ECX > 0.
  // adcl folds the carry from the previous digit into this one.
  __ movl(EAX,
          FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ adcl(EAX,
          FieldAddress(ESI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
          EAX);
  __ incl(EDX);  // Does not affect carry flag.
  __ decl(ECX);  // Does not affect carry flag.
  __ j(NOT_ZERO, &add_loop, Assembler::kNearJump);

  Label last_carry;
  __ popl(ECX);
  __ decl(ECX);  // Does not affect carry flag.
  __ j(ZERO, &last_carry, Assembler::kNearJump);  // If used - a_used == 0.

  Label carry_loop;
  __ Bind(&carry_loop);
  // Loop used - a_used times, ECX = used - a_used, ECX > 0.
  // Copy the remaining digits of 'digits', still propagating the carry.
  __ movl(EAX,
          FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ adcl(EAX, Immediate(0));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
          EAX);
  __ incl(EDX);  // Does not affect carry flag.
  __ decl(ECX);  // Does not affect carry flag.
  __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

  __ Bind(&last_carry);
  // Store the final carry (0 or 1) as the most significant result digit.
  __ movl(EAX, Immediate(0));
  __ adcl(EAX, Immediate(0));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
          EAX);

  // Restore THR and return.
  __ popl(THR);
  __ LoadObject(EAX, NullObject());
  __ ret();
}
428
void AsmIntrinsifier::Bigint_absSub(Assembler* assembler,
                                    Label* normal_ir_body) {
  // static void _absSub(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)
  // Digit-wise magnitude subtraction with borrow propagation; assumes
  // a_used <= used (implied by the precomputed loop counts) — TODO confirm.

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  // Stack offsets below account for the pushed THR.
  __ movl(EDI, Address(ESP, 6 * target::kWordSize));  // digits
  __ movl(EAX, Address(ESP, 5 * target::kWordSize));  // used is Smi
  __ SmiUntag(EAX);  // used > 0.
  __ movl(ESI, Address(ESP, 4 * target::kWordSize));  // a_digits
  __ movl(ECX, Address(ESP, 3 * target::kWordSize));  // a_used is Smi
  __ SmiUntag(ECX);  // a_used > 0.
  __ movl(EBX, Address(ESP, 2 * target::kWordSize));  // r_digits

  // Precompute 'used - a_used' now so that carry flag is not lost later.
  __ subl(EAX, ECX);
  __ incl(EAX);  // To account for the extra test between loops.
  __ pushl(EAX);

  __ xorl(EDX, EDX);  // EDX = 0, carry flag = 0.
  Label sub_loop;
  __ Bind(&sub_loop);
  // Loop a_used times, ECX = a_used, ECX > 0.
  // sbbl folds the borrow from the previous digit into this one.
  __ movl(EAX,
          FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ sbbl(EAX,
          FieldAddress(ESI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
          EAX);
  __ incl(EDX);  // Does not affect carry flag.
  __ decl(ECX);  // Does not affect carry flag.
  __ j(NOT_ZERO, &sub_loop, Assembler::kNearJump);

  Label done;
  __ popl(ECX);
  __ decl(ECX);  // Does not affect carry flag.
  __ j(ZERO, &done, Assembler::kNearJump);  // If used - a_used == 0.

  Label carry_loop;
  __ Bind(&carry_loop);
  // Loop used - a_used times, ECX = used - a_used, ECX > 0.
  // Copy the remaining digits of 'digits', still propagating the borrow.
  __ movl(EAX,
          FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
  __ sbbl(EAX, Immediate(0));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
          EAX);
  __ incl(EDX);  // Does not affect carry flag.
  __ decl(ECX);  // Does not affect carry flag.
  __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

  __ Bind(&done);
  // Restore THR and return.
  __ popl(THR);
  __ LoadObject(EAX, NullObject());
  __ ret();
}
489
490void AsmIntrinsifier::Bigint_mulAdd(Assembler* assembler,
491 Label* normal_ir_body) {
492 // Pseudo code:
493 // static int _mulAdd(Uint32List x_digits, int xi,
494 // Uint32List m_digits, int i,
495 // Uint32List a_digits, int j, int n) {
496 // uint32_t x = x_digits[xi >> 1]; // xi is Smi.
497 // if (x == 0 || n == 0) {
498 // return 1;
499 // }
500 // uint32_t* mip = &m_digits[i >> 1]; // i is Smi.
501 // uint32_t* ajp = &a_digits[j >> 1]; // j is Smi.
502 // uint32_t c = 0;
503 // SmiUntag(n);
504 // do {
505 // uint32_t mi = *mip++;
506 // uint32_t aj = *ajp;
507 // uint64_t t = x*mi + aj + c; // 32-bit * 32-bit -> 64-bit.
508 // *ajp++ = low32(t);
509 // c = high32(t);
510 // } while (--n > 0);
511 // while (c != 0) {
512 // uint64_t t = *ajp + c;
513 // *ajp++ = low32(t);
514 // c = high32(t); // c == 0 or 1.
515 // }
516 // return 1;
517 // }
518
519 Label no_op;
520 // EBX = x, no_op if x == 0
521 __ movl(ECX, Address(ESP, 7 * target::kWordSize)); // x_digits
522 __ movl(EAX, Address(ESP, 6 * target::kWordSize)); // xi is Smi
523 __ movl(EBX,
524 FieldAddress(ECX, EAX, TIMES_2, target::TypedData::payload_offset()));
525 __ testl(EBX, EBX);
526 __ j(ZERO, &no_op, Assembler::kNearJump);
527
528 // EDX = SmiUntag(n), no_op if n == 0
529 __ movl(EDX, Address(ESP, 1 * target::kWordSize));
530 __ SmiUntag(EDX);
531 __ j(ZERO, &no_op, Assembler::kNearJump);
532
533 // Preserve THR to free ESI.
534 __ pushl(THR);
535 ASSERT(THR == ESI);
536
537 // EDI = mip = &m_digits[i >> 1]
538 __ movl(EDI, Address(ESP, 6 * target::kWordSize)); // m_digits
539 __ movl(EAX, Address(ESP, 5 * target::kWordSize)); // i is Smi
540 __ leal(EDI,
541 FieldAddress(EDI, EAX, TIMES_2, target::TypedData::payload_offset()));
542
543 // ESI = ajp = &a_digits[j >> 1]
544 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // a_digits
545 __ movl(EAX, Address(ESP, 3 * target::kWordSize)); // j is Smi
546 __ leal(ESI,
547 FieldAddress(ESI, EAX, TIMES_2, target::TypedData::payload_offset()));
548
549 // Save n
550 __ pushl(EDX);
551 Address n_addr = Address(ESP, 0 * target::kWordSize);
552
553 // ECX = c = 0
554 __ xorl(ECX, ECX);
555
556 Label muladd_loop;
557 __ Bind(&muladd_loop);
558 // x: EBX
559 // mip: EDI
560 // ajp: ESI
561 // c: ECX
562 // t: EDX:EAX (not live at loop entry)
563 // n: ESP[0]
564
565 // uint32_t mi = *mip++
566 __ movl(EAX, Address(EDI, 0));
567 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
568
569 // uint64_t t = x*mi
570 __ mull(EBX); // t = EDX:EAX = EAX * EBX
571 __ addl(EAX, ECX); // t += c
572 __ adcl(EDX, Immediate(0));
573
574 // uint32_t aj = *ajp; t += aj
575 __ addl(EAX, Address(ESI, 0));
576 __ adcl(EDX, Immediate(0));
577
578 // *ajp++ = low32(t)
579 __ movl(Address(ESI, 0), EAX);
580 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
581
582 // c = high32(t)
583 __ movl(ECX, EDX);
584
585 // while (--n > 0)
586 __ decl(n_addr); // --n
587 __ j(NOT_ZERO, &muladd_loop, Assembler::kNearJump);
588
589 Label done;
590 __ testl(ECX, ECX);
592
593 // *ajp += c
594 __ addl(Address(ESI, 0), ECX);
596
597 Label propagate_carry_loop;
598 __ Bind(&propagate_carry_loop);
599 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
600 __ incl(Address(ESI, 0)); // c == 0 or 1
601 __ j(CARRY, &propagate_carry_loop, Assembler::kNearJump);
602
603 __ Bind(&done);
604 __ Drop(1); // n
605 // Restore THR and return.
606 __ popl(THR);
607
608 __ Bind(&no_op);
609 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
610 __ ret();
611}
612
613void AsmIntrinsifier::Bigint_sqrAdd(Assembler* assembler,
614 Label* normal_ir_body) {
615 // Pseudo code:
616 // static int _sqrAdd(Uint32List x_digits, int i,
617 // Uint32List a_digits, int used) {
618 // uint32_t* xip = &x_digits[i >> 1]; // i is Smi.
619 // uint32_t x = *xip++;
620 // if (x == 0) return 1;
621 // uint32_t* ajp = &a_digits[i]; // j == 2*i, i is Smi.
622 // uint32_t aj = *ajp;
623 // uint64_t t = x*x + aj;
624 // *ajp++ = low32(t);
625 // uint64_t c = high32(t);
626 // int n = ((used - i) >> 1) - 1; // used and i are Smi.
627 // while (--n >= 0) {
628 // uint32_t xi = *xip++;
629 // uint32_t aj = *ajp;
630 // uint96_t t = 2*x*xi + aj + c; // 2-bit * 32-bit * 32-bit -> 65-bit.
631 // *ajp++ = low32(t);
632 // c = high64(t); // 33-bit.
633 // }
634 // uint32_t aj = *ajp;
635 // uint64_t t = aj + c; // 32-bit + 33-bit -> 34-bit.
636 // *ajp++ = low32(t);
637 // *ajp = high32(t);
638 // return 1;
639 // }
640
641 // EDI = xip = &x_digits[i >> 1]
642 __ movl(EDI, Address(ESP, 4 * target::kWordSize)); // x_digits
643 __ movl(EAX, Address(ESP, 3 * target::kWordSize)); // i is Smi
644 __ leal(EDI,
645 FieldAddress(EDI, EAX, TIMES_2, target::TypedData::payload_offset()));
646
647 // EBX = x = *xip++, return if x == 0
648 Label x_zero;
649 __ movl(EBX, Address(EDI, 0));
650 __ cmpl(EBX, Immediate(0));
651 __ j(EQUAL, &x_zero, Assembler::kNearJump);
652 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
653
654 // Preserve THR to free ESI.
655 __ pushl(THR);
656 ASSERT(THR == ESI);
657
658 // ESI = ajp = &a_digits[i]
659 __ movl(ESI, Address(ESP, 3 * target::kWordSize)); // a_digits
660 __ leal(ESI,
661 FieldAddress(ESI, EAX, TIMES_4, target::TypedData::payload_offset()));
662
663 // EDX:EAX = t = x*x + *ajp
664 __ movl(EAX, EBX);
665 __ mull(EBX);
666 __ addl(EAX, Address(ESI, 0));
667 __ adcl(EDX, Immediate(0));
668
669 // *ajp++ = low32(t)
670 __ movl(Address(ESI, 0), EAX);
671 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
672
673 // int n = used - i - 1
674 __ movl(EAX, Address(ESP, 2 * target::kWordSize)); // used is Smi
675 __ subl(EAX, Address(ESP, 4 * target::kWordSize)); // i is Smi
676 __ SmiUntag(EAX);
677 __ decl(EAX);
678 __ pushl(EAX); // Save n on stack.
679
680 // uint64_t c = high32(t)
681 __ pushl(Immediate(0)); // push high32(c) == 0
682 __ pushl(EDX); // push low32(c) == high32(t)
683
684 Address n_addr = Address(ESP, 2 * target::kWordSize);
685 Address ch_addr = Address(ESP, 1 * target::kWordSize);
686 Address cl_addr = Address(ESP, 0 * target::kWordSize);
687
688 Label loop, done;
689 __ Bind(&loop);
690 // x: EBX
691 // xip: EDI
692 // ajp: ESI
693 // c: ESP[1]:ESP[0]
694 // t: ECX:EDX:EAX (not live at loop entry)
695 // n: ESP[2]
696
697 // while (--n >= 0)
698 __ decl(Address(ESP, 2 * target::kWordSize)); // --n
700
701 // uint32_t xi = *xip++
702 __ movl(EAX, Address(EDI, 0));
703 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
704
705 // uint96_t t = ECX:EDX:EAX = 2*x*xi + aj + c
706 __ mull(EBX); // EDX:EAX = EAX * EBX
707 __ xorl(ECX, ECX); // ECX = 0
708 __ shldl(ECX, EDX, Immediate(1));
709 __ shldl(EDX, EAX, Immediate(1));
710 __ shll(EAX, Immediate(1)); // ECX:EDX:EAX <<= 1
711 __ addl(EAX, Address(ESI, 0)); // t += aj
712 __ adcl(EDX, Immediate(0));
713 __ adcl(ECX, Immediate(0));
714 __ addl(EAX, cl_addr); // t += low32(c)
715 __ adcl(EDX, ch_addr); // t += high32(c) << 32
716 __ adcl(ECX, Immediate(0));
717
718 // *ajp++ = low32(t)
719 __ movl(Address(ESI, 0), EAX);
720 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
721
722 // c = high64(t)
723 __ movl(cl_addr, EDX);
724 __ movl(ch_addr, ECX);
725
726 __ jmp(&loop, Assembler::kNearJump);
727
728 __ Bind(&done);
729 // uint64_t t = aj + c
730 __ movl(EAX, cl_addr); // t = c
731 __ movl(EDX, ch_addr);
732 __ addl(EAX, Address(ESI, 0)); // t += *ajp
733 __ adcl(EDX, Immediate(0));
734
735 // *ajp++ = low32(t)
736 // *ajp = high32(t)
737 __ movl(Address(ESI, 0), EAX);
738 __ movl(Address(ESI, kBytesPerBigIntDigit), EDX);
739
740 // Restore THR and return.
741 __ Drop(3);
742 __ popl(THR);
743 __ Bind(&x_zero);
744 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
745 __ ret();
746}
747
748void AsmIntrinsifier::Bigint_estimateQuotientDigit(Assembler* assembler,
749 Label* normal_ir_body) {
750 // Pseudo code:
751 // static int _estQuotientDigit(Uint32List args, Uint32List digits, int i) {
752 // uint32_t yt = args[_YT]; // _YT == 1.
753 // uint32_t* dp = &digits[i >> 1]; // i is Smi.
754 // uint32_t dh = dp[0]; // dh == digits[i >> 1].
755 // uint32_t qd;
756 // if (dh == yt) {
757 // qd = DIGIT_MASK;
758 // } else {
759 // dl = dp[-1]; // dl == digits[(i - 1) >> 1].
760 // qd = dh:dl / yt; // No overflow possible, because dh < yt.
761 // }
762 // args[_QD] = qd; // _QD == 2.
763 // return 1;
764 // }
765
766 // EDI = args
767 __ movl(EDI, Address(ESP, 3 * target::kWordSize)); // args
768
769 // ECX = yt = args[1]
770 __ movl(ECX, FieldAddress(EDI, target::TypedData::payload_offset() +
772
773 // EBX = dp = &digits[i >> 1]
774 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // digits
775 __ movl(EAX, Address(ESP, 1 * target::kWordSize)); // i is Smi
776 __ leal(EBX,
777 FieldAddress(EBX, EAX, TIMES_2, target::TypedData::payload_offset()));
778
779 // EDX = dh = dp[0]
780 __ movl(EDX, Address(EBX, 0));
781
782 // EAX = qd = DIGIT_MASK = -1
783 __ movl(EAX, Immediate(-1));
784
785 // Return qd if dh == yt
786 Label return_qd;
787 __ cmpl(EDX, ECX);
788 __ j(EQUAL, &return_qd, Assembler::kNearJump);
789
790 // EAX = dl = dp[-1]
791 __ movl(EAX, Address(EBX, -kBytesPerBigIntDigit));
792
793 // EAX = qd = dh:dl / yt = EDX:EAX / ECX
794 __ divl(ECX);
795
796 __ Bind(&return_qd);
797 // args[2] = qd
798 __ movl(FieldAddress(EDI, target::TypedData::payload_offset() +
800 EAX);
801
802 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
803 __ ret();
804}
805
806void AsmIntrinsifier::Montgomery_mulMod(Assembler* assembler,
807 Label* normal_ir_body) {
808 // Pseudo code:
809 // static int _mulMod(Uint32List args, Uint32List digits, int i) {
810 // uint32_t rho = args[_RHO]; // _RHO == 2.
811 // uint32_t d = digits[i >> 1]; // i is Smi.
812 // uint64_t t = rho*d;
813 // args[_MU] = t mod DIGIT_BASE; // _MU == 4.
814 // return 1;
815 // }
816
817 // EDI = args
818 __ movl(EDI, Address(ESP, 3 * target::kWordSize)); // args
819
820 // ECX = rho = args[2]
821 __ movl(ECX, FieldAddress(EDI, target::TypedData::payload_offset() +
823
824 // EAX = digits[i >> 1]
825 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // digits
826 __ movl(EAX, Address(ESP, 1 * target::kWordSize)); // i is Smi
827 __ movl(EAX,
828 FieldAddress(EBX, EAX, TIMES_2, target::TypedData::payload_offset()));
829
830 // EDX:EAX = t = rho*d
831 __ mull(ECX);
832
833 // args[4] = t mod DIGIT_BASE = low32(t)
834 __ movl(FieldAddress(EDI, target::TypedData::payload_offset() +
836 EAX);
837
838 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
839 __ ret();
840}
841
842// Check if the last argument is a double, jump to label 'is_smi' if smi
843// (easy to convert to double), otherwise jump to label 'not_double_smi',
844// Returns the last argument in EAX.
static void TestLastArgumentIsDouble(Assembler* assembler,
                                     Label* is_smi,
                                     Label* not_double_smi) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // Last (top) argument.
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(ZERO, is_smi, Assembler::kNearJump);  // Jump if Smi.
  __ CompareClassId(EAX, kDoubleCid, EBX);  // Clobbers EBX.
  __ j(NOT_EQUAL, not_double_smi, Assembler::kNearJump);
  // Fall through if double.
}
855
856// Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown
857// type. Return true or false object in the register EAX. Any NaN argument
858// returns false. Any non-double arg1 causes control flow to fall through to the
859// slow case (compiled method body).
static void CompareDoubles(Assembler* assembler,
                           Label* normal_ir_body,
                           Condition true_condition) {
  Label is_false, is_true, is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, normal_ir_body);
  // Both arguments are double, right operand is in EAX.
  __ movsd(XMM1, FieldAddress(EAX, target::Double::value_offset()));
  __ Bind(&double_op);
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
  __ comisd(XMM0, XMM1);
  // comisd sets PF when the comparison is unordered (either operand NaN).
  __ j(PARITY_EVEN, &is_false, Assembler::kNearJump);  // NaN -> false;
  __ j(true_condition, &is_true, Assembler::kNearJump);
  // Fall through false.
  __ Bind(&is_false);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
  // Right argument was a Smi: convert it to double and retry.
  __ Bind(&is_smi);
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  __ jmp(&double_op);
  __ Bind(normal_ir_body);
}
886
887// arg0 is Double, arg1 is unknown.
888void AsmIntrinsifier::Double_greaterThan(Assembler* assembler,
889 Label* normal_ir_body) {
890 CompareDoubles(assembler, normal_ir_body, ABOVE);
891}
892
893// arg0 is Double, arg1 is unknown.
894void AsmIntrinsifier::Double_greaterEqualThan(Assembler* assembler,
895 Label* normal_ir_body) {
896 CompareDoubles(assembler, normal_ir_body, ABOVE_EQUAL);
897}
898
899// arg0 is Double, arg1 is unknown.
900void AsmIntrinsifier::Double_lessThan(Assembler* assembler,
901 Label* normal_ir_body) {
902 CompareDoubles(assembler, normal_ir_body, BELOW);
903}
904
905// arg0 is Double, arg1 is unknown.
906void AsmIntrinsifier::Double_equal(Assembler* assembler,
907 Label* normal_ir_body) {
908 CompareDoubles(assembler, normal_ir_body, EQUAL);
909}
910
911// arg0 is Double, arg1 is unknown.
912void AsmIntrinsifier::Double_lessEqualThan(Assembler* assembler,
913 Label* normal_ir_body) {
914 CompareDoubles(assembler, normal_ir_body, BELOW_EQUAL);
915}
916
917// Expects left argument to be double (receiver). Right argument is unknown.
918// Both arguments are on stack.
static void DoubleArithmeticOperations(Assembler* assembler,
                                       Label* normal_ir_body,
                                       Token::Kind kind) {
  Label is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, normal_ir_body);
  // Both arguments are double, right operand is in EAX.
  __ movsd(XMM1, FieldAddress(EAX, target::Double::value_offset()));
  __ Bind(&double_op);
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
  // Select the SSE2 instruction for the requested operator; result in XMM0.
  switch (kind) {
    case Token::kADD:
      __ addsd(XMM0, XMM1);
      break;
    case Token::kSUB:
      __ subsd(XMM0, XMM1);
      break;
    case Token::kMUL:
      __ mulsd(XMM0, XMM1);
      break;
    case Token::kDIV:
      __ divsd(XMM0, XMM1);
      break;
    default:
      UNREACHABLE();
  }
  // Box the result; allocation failure falls through to the compiled body.
  const Class& double_class = DoubleClass();
  __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
                 EAX,  // Result register.
                 EBX);
  __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
  __ ret();
  // Right argument was a Smi: convert it to double and retry.
  __ Bind(&is_smi);
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  __ jmp(&double_op);
  __ Bind(normal_ir_body);
}
957
// Thin wrappers selecting the arithmetic operation for the shared
// DoubleArithmeticOperations fast path above.
void AsmIntrinsifier::Double_add(Assembler* assembler, Label* normal_ir_body) {
  DoubleArithmeticOperations(assembler, normal_ir_body, Token::kADD);
}

void AsmIntrinsifier::Double_mul(Assembler* assembler, Label* normal_ir_body) {
  DoubleArithmeticOperations(assembler, normal_ir_body, Token::kMUL);
}

void AsmIntrinsifier::Double_sub(Assembler* assembler, Label* normal_ir_body) {
  DoubleArithmeticOperations(assembler, normal_ir_body, Token::kSUB);
}

void AsmIntrinsifier::Double_div(Assembler* assembler, Label* normal_ir_body) {
  DoubleArithmeticOperations(assembler, normal_ir_body, Token::kDIV);
}
973
// Left is double, right is integer (Mint or Smi).
// Fast path handles only a Smi right operand: Mints (non-Smi) bail out to
// normal_ir_body via the tag check below.
void AsmIntrinsifier::Double_mulFromInteger(Assembler* assembler,
                                            Label* normal_ir_body) {
  // Only smis allowed.
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump);
  // Is Smi: untag and convert to double in XMM1.
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  // Load the double receiver and multiply.
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));
  __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
  __ mulsd(XMM0, XMM1);
  // Box the result; allocation failure falls back to the runtime.
  const Class& double_class = DoubleClass();
  __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
                 EAX,  // Result register.
                 EBX);
  __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
  __ ret();
  __ Bind(normal_ir_body);
}
995
// int.toDouble() fast path: converts a Smi argument to a freshly allocated
// boxed double. Non-Smi integers and allocation failure go to the runtime.
void AsmIntrinsifier::DoubleFromInteger(Assembler* assembler,
                                        Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump);
  // Is Smi: untag and convert to double.
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM0, EAX);
  const Class& double_class = DoubleClass();
  __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
                 EAX,  // Result register.
                 EBX);
  __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
  __ ret();
  __ Bind(normal_ir_body);
}
1012
// double.isNaN fast path. comisd of a value against itself sets the parity
// flag exactly when the comparison is unordered, i.e. when the value is NaN.
void AsmIntrinsifier::Double_getIsNaN(Assembler* assembler,
                                      Label* normal_ir_body) {
  Label is_true;
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
  __ comisd(XMM0, XMM0);
  __ j(PARITY_EVEN, &is_true, Assembler::kNearJump);  // NaN -> true;
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
}
1026
// double.isInfinite fast path. Checks the IEEE 754 bit pattern of the boxed
// 64-bit value word-by-word (IA32 has no 64-bit GP registers): an infinity
// has an all-zero low word and, after clearing the sign bit, a high word of
// exactly 0x7FF00000 (exponent all ones, mantissa zero).
void AsmIntrinsifier::Double_getIsInfinite(Assembler* assembler,
                                           Label* normal_ir_body) {
  Label not_inf;
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ movl(EBX, FieldAddress(EAX, target::Double::value_offset()));

  // If the low word isn't zero, then it isn't infinity.
  __ cmpl(EBX, Immediate(0));
  __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
  // Check the high word.
  __ movl(EBX, FieldAddress(
                   EAX, target::Double::value_offset() + target::kWordSize));
  // Mask off sign bit.
  __ andl(EBX, Immediate(0x7FFFFFFF));
  // Compare with +infinity.
  __ cmpl(EBX, Immediate(0x7FF00000));
  __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();

  __ Bind(&not_inf);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
}
1051
// double.isNegative fast path. Compares against 0.0:
//   * NaN (unordered compare, parity set) -> false,
//   * equal to zero -> inspect the sign bit to distinguish -0.0 (true)
//     from +0.0 (false) via movmskpd,
//   * otherwise the below/above result of the compare decides.
void AsmIntrinsifier::Double_getIsNegative(Assembler* assembler,
                                           Label* normal_ir_body) {
  Label is_false, is_true, is_zero;
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
  __ xorpd(XMM1, XMM1);  // 0.0 -> XMM1.
  __ comisd(XMM0, XMM1);
  __ j(PARITY_EVEN, &is_false, Assembler::kNearJump);  // NaN -> false.
  __ j(EQUAL, &is_zero, Assembler::kNearJump);  // Check for negative zero.
  __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump);  // >= 0 -> false.
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
  __ Bind(&is_false);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_zero);
  // Check for negative zero (get the sign bit).
  __ movmskpd(EAX, XMM0);
  __ testl(EAX, Immediate(1));
  __ j(NOT_ZERO, &is_true, Assembler::kNearJump);
  __ jmp(&is_false, Assembler::kNearJump);
}
1075
// Identity comparison: returns true iff both arguments are the very same
// object (pointer/Smi bit-pattern equality), never dereferencing either.
void AsmIntrinsifier::ObjectEquals(Assembler* assembler,
                                   Label* normal_ir_body) {
  Label is_true;
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ cmpl(EAX, Address(ESP, +2 * target::kWordSize));
  __ j(EQUAL, &is_true, Assembler::kNearJump);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
}
1089
// The helpers below branch to |target| based on whether |cid| falls inside
// (or outside) a contiguous class-id range. They rely on the predefined cid
// layout keeping related classes contiguous (e.g. kSmiCid..kMintCid for
// integers, kOneByteStringCid..kTwoByteStringCid for strings). |cid| may be
// clobbered by RangeCheck; callers copy it into a scratch register first.

static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) {
  assembler->RangeCheck(cid, kNoRegister, kSmiCid, kMintCid,
                        Assembler::kIfInRange, target);
}

static void JumpIfNotInteger(Assembler* assembler,
                             Register cid,
                             Label* target) {
  assembler->RangeCheck(cid, kNoRegister, kSmiCid, kMintCid,
                        Assembler::kIfNotInRange, target);
}

static void JumpIfString(Assembler* assembler, Register cid, Label* target) {
  assembler->RangeCheck(cid, kNoRegister, kOneByteStringCid, kTwoByteStringCid,
                        Assembler::kIfInRange, target);
}

static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) {
  assembler->RangeCheck(cid, kNoRegister, kOneByteStringCid, kTwoByteStringCid,
                        Assembler::kIfNotInRange, target);
}

static void JumpIfNotList(Assembler* assembler, Register cid, Label* target) {
  assembler->RangeCheck(cid, kNoRegister, kArrayCid, kGrowableObjectArrayCid,
                        Assembler::kIfNotInRange, target);
}

static void JumpIfType(Assembler* assembler, Register cid, Label* target) {
  // The type cids must stay contiguous for the range check to be valid.
  COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
                 (kRecordTypeCid == kTypeCid + 2));
  assembler->RangeCheck(cid, kNoRegister, kTypeCid, kRecordTypeCid,
                        Assembler::kIfInRange, target);
}

static void JumpIfNotType(Assembler* assembler, Register cid, Label* target) {
  // The type cids must stay contiguous for the range check to be valid.
  COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
                 (kRecordTypeCid == kTypeCid + 2));
  assembler->RangeCheck(cid, kNoRegister, kTypeCid, kRecordTypeCid,
                        Assembler::kIfNotInRange, target);
}
1130
// Return type quickly for simple types (not parameterized and not signature).
// Fast paths: double/int/String/Type instances return the canonical type
// cached in the ObjectStore; other non-generic classes return their cached
// declaration type. Closures, records, generic classes, and classes whose
// declaration type is not yet set fall back to the runtime.
void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
                                        Label* normal_ir_body) {
  Label use_declaration_type, not_double, not_integer, not_string;
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ LoadClassIdMayBeSmi(EDI, EAX);

  __ cmpl(EDI, Immediate(kClosureCid));
  __ j(EQUAL, normal_ir_body);  // Instance is a closure.

  __ cmpl(EDI, Immediate(kRecordCid));
  __ j(EQUAL, normal_ir_body);  // Instance is a record.

  // User-defined classes (cid >= kNumPredefinedCids) use the declaration
  // type path below.
  __ cmpl(EDI, Immediate(kNumPredefinedCids));
  __ j(ABOVE, &use_declaration_type);

  // If object is an instance of _Double return double type.
  __ cmpl(EDI, Immediate(kDoubleCid));
  __ j(NOT_EQUAL, &not_double);

  __ LoadIsolateGroup(EAX);
  __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
  __ movl(EAX, Address(EAX, target::ObjectStore::double_type_offset()));
  __ ret();

  __ Bind(&not_double);
  // If object is an integer (smi, mint or bigint) return int type.
  __ movl(EAX, EDI);
  JumpIfNotInteger(assembler, EAX, &not_integer);

  __ LoadIsolateGroup(EAX);
  __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
  __ movl(EAX, Address(EAX, target::ObjectStore::int_type_offset()));
  __ ret();

  __ Bind(&not_integer);
  // If object is a string (one byte, two byte or external variants) return
  // string type.
  __ movl(EAX, EDI);
  JumpIfNotString(assembler, EAX, &not_string);

  __ LoadIsolateGroup(EAX);
  __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
  __ movl(EAX, Address(EAX, target::ObjectStore::string_type_offset()));
  __ ret();

  __ Bind(&not_string);
  // If object is a type or function type, return Dart type.
  __ movl(EAX, EDI);
  JumpIfNotType(assembler, EAX, &use_declaration_type);

  __ LoadIsolateGroup(EAX);
  __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
  __ movl(EAX, Address(EAX, target::ObjectStore::type_type_offset()));
  __ ret();

  // Object is neither double, nor integer, nor string, nor type.
  __ Bind(&use_declaration_type);
  __ LoadClassById(EBX, EDI);
  __ movzxw(EDI, FieldAddress(EBX, target::Class::num_type_arguments_offset()));
  // Generic classes need runtime instantiation of their type.
  __ cmpl(EDI, Immediate(0));
  __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
  __ movl(EAX, FieldAddress(EBX, target::Class::declaration_type_offset()));
  __ CompareObject(EAX, NullObject());
  __ j(EQUAL, normal_ir_body, Assembler::kNearJump);  // Not yet set.
  __ ret();

  __ Bind(normal_ir_body);
}
1200
// Compares cid1 and cid2 to see if they're syntactically equivalent. If this
// can be determined by this fast path, it jumps to either equal_* or not_equal.
// If classes are equivalent but may be generic, then jumps to
// equal_may_be_generic. Clobbers scratch.
//
// Different cids can still be equivalent because several implementation
// classes map onto a single user-visible type (integer, String, List, Type
// families). |testing_instance_cids| selects the wider instance-level rules
// (List handling and the not-a-Type fallthrough); Type_equality passes false
// since it compares type-class cids, not instance cids.
static void EquivalentClassIds(Assembler* assembler,
                               Label* normal_ir_body,
                               Label* equal_may_be_generic,
                               Label* equal_not_generic,
                               Label* not_equal,
                               Register cid1,
                               Register cid2,
                               Register scratch,
                               bool testing_instance_cids) {
  Label not_integer, not_integer_or_string, not_integer_or_string_or_list;

  // Check if left hand side is a closure. Closures are handled in the runtime.
  __ cmpl(cid1, Immediate(kClosureCid));
  __ j(EQUAL, normal_ir_body);

  // Check if left hand side is a record. Records are handled in the runtime.
  __ cmpl(cid1, Immediate(kRecordCid));
  __ j(EQUAL, normal_ir_body);

  // Check whether class ids match. If class ids don't match types may still be
  // considered equivalent (e.g. multiple string implementation classes map to a
  // single String type).
  __ cmpl(cid1, cid2);
  __ j(EQUAL, equal_may_be_generic);

  // Class ids are different. Check if we are comparing two string types (with
  // different representations), two integer types, two list types or two type
  // types.
  __ cmpl(cid1, Immediate(kNumPredefinedCids));
  __ j(ABOVE_EQUAL, not_equal);

  // Check if both are integer types.
  __ movl(scratch, cid1);
  JumpIfNotInteger(assembler, scratch, &not_integer);

  // First type is an integer. Check if the second is an integer too.
  __ movl(scratch, cid2);
  JumpIfInteger(assembler, scratch, equal_not_generic);
  // Integer types are only equivalent to other integer types.
  __ jmp(not_equal);

  __ Bind(&not_integer);
  // Check if both are String types.
  __ movl(scratch, cid1);
  JumpIfNotString(assembler, scratch,
                  testing_instance_cids ? &not_integer_or_string : not_equal);

  // First type is a String. Check if the second is a String too.
  __ movl(scratch, cid2);
  JumpIfString(assembler, scratch, equal_not_generic);
  // String types are only equivalent to other String types.
  __ jmp(not_equal);

  if (testing_instance_cids) {
    __ Bind(&not_integer_or_string);
    // Check if both are List types.
    __ movl(scratch, cid1);
    JumpIfNotList(assembler, scratch, &not_integer_or_string_or_list);

    // First type is a List. Check if the second is a List too.
    __ movl(scratch, cid2);
    JumpIfNotList(assembler, scratch, not_equal);
    // Lists are generic; callers compare type arguments at a shared offset,
    // which the assert below guarantees is valid for both list classes.
    ASSERT(compiler::target::Array::type_arguments_offset() ==
           compiler::target::GrowableObjectArray::type_arguments_offset());
    __ jmp(equal_may_be_generic);

    __ Bind(&not_integer_or_string_or_list);
    // Check if the first type is a Type. If it is not then types are not
    // equivalent because they have different class ids and they are not String
    // or integer or List or Type.
    __ movl(scratch, cid1);
    JumpIfNotType(assembler, scratch, not_equal);

    // First type is a Type. Check if the second is a Type too.
    __ movl(scratch, cid2);
    JumpIfType(assembler, scratch, equal_not_generic);
    // Type types are only equivalent to other Type types.
    __ jmp(not_equal);
  }
}
1285
// Fast path for identical(a.runtimeType, b.runtimeType)-style checks:
// compares class ids via EquivalentClassIds, then — for possibly-generic
// classes — compares the raw type-argument vectors for pointer equality.
// Anything subtler (unequal but equivalent type-argument vectors, closures,
// records) goes to the runtime.
void AsmIntrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler,
                                                Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));
  __ LoadClassIdMayBeSmi(EDI, EAX);

  __ movl(EAX, Address(ESP, +2 * target::kWordSize));
  __ LoadClassIdMayBeSmi(EBX, EAX);

  Label equal_may_be_generic, equal, not_equal;
  EquivalentClassIds(assembler, normal_ir_body, &equal_may_be_generic, &equal,
                     &not_equal, EDI, EBX, EAX,
                     /* testing_instance_cids = */ true);

  __ Bind(&equal_may_be_generic);
  // Classes are equivalent and neither is a closure class.
  // Check if there are no type arguments. In this case we can return true.
  // Otherwise fall through into the runtime to handle comparison.
  __ LoadClassById(EAX, EDI);
  __ movl(
      EAX,
      FieldAddress(
          EAX,
          target::Class::host_type_arguments_field_offset_in_words_offset()));
  __ cmpl(EAX, Immediate(target::Class::kNoTypeArguments));
  __ j(EQUAL, &equal);

  // Compare type arguments, host_type_arguments_field_offset_in_words in EAX.
  // Scaled by TIMES_4 to convert the word offset into a byte offset.
  __ movl(EDI, Address(ESP, +1 * target::kWordSize));
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));
  __ movl(EDI, FieldAddress(EDI, EAX, TIMES_4, 0));
  __ movl(EBX, FieldAddress(EBX, EAX, TIMES_4, 0));
  __ cmpl(EDI, EBX);
  __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
  // Fall through to equal case if type arguments are equal.

  __ Bind(&equal);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();

  __ Bind(&not_equal);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();

  __ Bind(normal_ir_body);
}
1331
// String.hashCode fast path: returns the cached hash field if it has already
// been computed (stored as a non-zero Smi); a zero hash means "not yet
// computed" and defers to the runtime.
void AsmIntrinsifier::String_getHashCode(Assembler* assembler,
                                         Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // String object.
  __ movl(EAX, FieldAddress(EAX, target::String::hash_offset()));
  __ cmpl(EAX, Immediate(0));
  __ j(EQUAL, normal_ir_body, Assembler::kNearJump);
  __ ret();
  __ Bind(normal_ir_body);
  // Hash not yet computed.
}
1342
1343void AsmIntrinsifier::Type_equality(Assembler* assembler,
1344 Label* normal_ir_body) {
1345 Label equal, not_equal, equiv_cids_may_be_generic, equiv_cids, check_legacy;
1346
1347 __ movl(EDI, Address(ESP, +1 * target::kWordSize));
1348 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
1349 __ cmpl(EDI, EBX);
1350 __ j(EQUAL, &equal);
1351
1352 // EDI might not be a Type object, so check that first (EBX should be though,
1353 // since this is a method on the Type class).
1354 __ LoadClassIdMayBeSmi(EAX, EDI);
1355 __ cmpl(EAX, Immediate(kTypeCid));
1356 __ j(NOT_EQUAL, normal_ir_body);
1357
1358 // Check if types are syntactically equal.
1359 __ LoadTypeClassId(ECX, EDI);
1360 __ LoadTypeClassId(EDX, EBX);
1361 // We are not testing instance cids, but type class cids of Type instances.
1362 EquivalentClassIds(assembler, normal_ir_body, &equiv_cids_may_be_generic,
1363 &equiv_cids, &not_equal, ECX, EDX, EAX,
1364 /* testing_instance_cids = */ false);
1365
1366 __ Bind(&equiv_cids_may_be_generic);
1367 // Compare type arguments in Type instances.
1368 __ movl(ECX, FieldAddress(EDI, target::Type::arguments_offset()));
1369 __ movl(EDX, FieldAddress(EBX, target::Type::arguments_offset()));
1370 __ cmpl(ECX, EDX);
1371 __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
1372 // Fall through to check nullability if type arguments are equal.
1373
1374 // Check nullability.
1375 __ Bind(&equiv_cids);
1376 __ LoadAbstractTypeNullability(EDI, EDI);
1377 __ LoadAbstractTypeNullability(EBX, EBX);
1378 __ cmpl(EDI, EBX);
1379 __ j(NOT_EQUAL, &check_legacy, Assembler::kNearJump);
1380 // Fall through to equal case if nullability is strictly equal.
1381
1382 __ Bind(&equal);
1383 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1384 __ ret();
1385
1386 // At this point the nullabilities are different, so they can only be
1387 // syntactically equivalent if they're both either kNonNullable or kLegacy.
1388 // These are the two largest values of the enum, so we can just do a < check.
1389 ASSERT(target::Nullability::kNullable < target::Nullability::kNonNullable &&
1390 target::Nullability::kNonNullable < target::Nullability::kLegacy);
1391 __ Bind(&check_legacy);
1392 __ cmpl(EDI, Immediate(target::Nullability::kNonNullable));
1393 __ j(LESS, &not_equal, Assembler::kNearJump);
1394 __ cmpl(EBX, Immediate(target::Nullability::kNonNullable));
1396
1397 __ Bind(&not_equal);
1398 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1399 __ ret();
1400
1401 __ Bind(normal_ir_body);
1402}
1403
// AbstractType.hashCode fast path: returns the cached hash field when it is
// non-zero; zero means the hash has not been computed, so fall back to the
// runtime.
void AsmIntrinsifier::AbstractType_getHashCode(Assembler* assembler,
                                               Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // AbstractType object.
  __ movl(EAX, FieldAddress(EAX, target::AbstractType::hash_offset()));
  __ testl(EAX, EAX);
  __ j(EQUAL, normal_ir_body, Assembler::kNearJump);
  __ ret();
  __ Bind(normal_ir_body);
  // Hash not yet computed.
}
1414
// AbstractType == fast path: only the trivial identical-instance case is
// handled inline; any non-identical pair goes to the runtime for the full
// equivalence check.
void AsmIntrinsifier::AbstractType_equality(Assembler* assembler,
                                            Label* normal_ir_body) {
  __ movl(EDI, Address(ESP, +1 * target::kWordSize));
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));
  __ cmpl(EDI, EBX);
  __ j(NOT_EQUAL, normal_ir_body);

  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();

  __ Bind(normal_ir_body);
}
1427
// bool _substringMatches(int start, String other)
// Intentionally empty: emitting no code leaves the method un-intrinsified on
// IA32, so the normal IR path is used.
void AsmIntrinsifier::StringBaseSubstringMatches(Assembler* assembler,
                                                 Label* normal_ir_body) {
  // For precompilation, not implemented on IA32.
}

// Object.get:hash is not intrinsified on IA32; reaching this entry point is
// a bug, hence the hard failure.
void AsmIntrinsifier::Object_getHash(Assembler* assembler,
                                     Label* normal_ir_body) {
  UNREACHABLE();
}
1438
// String.operator[] fast path: after Smi and range checks, loads the char
// code from a one- or two-byte string and returns the canonical one-character
// Symbol from the predefined symbol table. Char codes beyond the predefined
// table, non-Smi indices, and other string classes go to the runtime.
void AsmIntrinsifier::StringBaseCharAt(Assembler* assembler,
                                       Label* normal_ir_body) {
  Label try_two_byte_string;
  __ movl(EBX, Address(ESP, +1 * target::kWordSize));  // Index.
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // String.
  __ testl(EBX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump);  // Non-smi index.
  // Range check (unsigned compare also rejects negative indices).
  __ cmpl(EBX, FieldAddress(EAX, target::String::length_offset()));
  // Runtime throws exception.
  __ j(ABOVE_EQUAL, normal_ir_body, Assembler::kNearJump);
  __ CompareClassId(EAX, kOneByteStringCid, EDI);
  __ j(NOT_EQUAL, &try_two_byte_string, Assembler::kNearJump);
  __ SmiUntag(EBX);
  __ movzxb(EBX, FieldAddress(EAX, EBX, TIMES_1,
                              target::OneByteString::data_offset()));
  __ cmpl(EBX, Immediate(target::Symbols::kNumberOfOneCharCodeSymbols));
  __ j(GREATER_EQUAL, normal_ir_body);
  // Index into the predefined symbols table by char code.
  __ movl(EAX, Immediate(SymbolsPredefinedAddress()));
  __ movl(EAX, Address(EAX, EBX, TIMES_4,
                       target::Symbols::kNullCharCodeSymbolOffset *
                           target::kWordSize));
  __ ret();

  __ Bind(&try_two_byte_string);
  __ CompareClassId(EAX, kTwoByteStringCid, EDI);
  __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
  // The tagged Smi index (value * 2) is exactly the byte offset for 2-byte
  // elements, so no untagging is needed here.
  ASSERT(kSmiTagShift == 1);
  __ movzxw(EBX, FieldAddress(EAX, EBX, TIMES_1,
                              target::TwoByteString::data_offset()));
  __ cmpl(EBX, Immediate(target::Symbols::kNumberOfOneCharCodeSymbols));
  __ j(GREATER_EQUAL, normal_ir_body);
  __ movl(EAX, Immediate(SymbolsPredefinedAddress()));
  __ movl(EAX, Address(EAX, EBX, TIMES_4,
                       target::Symbols::kNullCharCodeSymbolOffset *
                           target::kWordSize));
  __ ret();

  __ Bind(normal_ir_body);
}
1479
// String.isEmpty fast path: compares the Smi length field against the Smi
// zero and returns the corresponding Bool.
void AsmIntrinsifier::StringBaseIsEmpty(Assembler* assembler,
                                        Label* normal_ir_body) {
  Label is_true;
  // Get length.
  __ movl(EAX, Address(ESP, +1 * target::kWordSize));  // String object.
  __ movl(EAX, FieldAddress(EAX, target::String::length_offset()));
  __ cmpl(EAX, Immediate(target::ToRawSmi(0)));
  __ j(EQUAL, &is_true, Assembler::kNearJump);
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
}
1494
1495void AsmIntrinsifier::OneByteString_getHashCode(Assembler* assembler,
1496 Label* normal_ir_body) {
1497 Label compute_hash;
1498 __ movl(EBX, Address(ESP, +1 * target::kWordSize)); // OneByteString object.
1499 __ movl(EAX, FieldAddress(EBX, target::String::hash_offset()));
1500 __ cmpl(EAX, Immediate(0));
1501 __ j(EQUAL, &compute_hash, Assembler::kNearJump);
1502 __ ret();
1503
1504 __ Bind(&compute_hash);
1505 // Hash not yet computed, use algorithm of class StringHasher.
1506 __ movl(ECX, FieldAddress(EBX, target::String::length_offset()));
1507 __ SmiUntag(ECX);
1508 __ xorl(EAX, EAX);
1509 __ xorl(EDI, EDI);
1510 // EBX: Instance of OneByteString.
1511 // ECX: String length, untagged integer.
1512 // EDI: Loop counter, untagged integer.
1513 // EAX: Hash code, untagged integer.
1514 Label loop, done;
1515 __ Bind(&loop);
1516 __ cmpl(EDI, ECX);
1518 // Add to hash code: (hash_ is uint32)
1519 // Get one characters (ch).
1520 __ movzxb(EDX, FieldAddress(EBX, EDI, TIMES_1,
1521 target::OneByteString::data_offset()));
1522 // EDX: ch and temporary.
1524
1525 __ incl(EDI);
1526 __ jmp(&loop, Assembler::kNearJump);
1527
1528 __ Bind(&done);
1529 // Finalize and fit to size kHashBits. Ensures hash is non-zero.
1530 __ FinalizeHashForSize(target::String::kHashBits, EAX, EDX);
1531 __ SmiTag(EAX);
1532 __ StoreIntoSmiField(FieldAddress(EBX, target::String::hash_offset()), EAX);
1533 __ ret();
1534}
1535
1536// Allocates a _OneByteString or _TwoByteString. The content is not initialized.
1537// 'length_reg' contains the desired length as a _Smi or _Mint.
1538// Returns new string as tagged pointer in EAX.
1539static void TryAllocateString(Assembler* assembler,
1540 classid_t cid,
1541 intptr_t max_elements,
1542 Label* ok,
1543 Label* failure,
1544 Register length_reg) {
1545 ASSERT(cid == kOneByteStringCid || cid == kTwoByteStringCid);
1546 // _Mint length: call to runtime to produce error.
1547 __ BranchIfNotSmi(length_reg, failure);
1548 // negative length: call to runtime to produce error.
1549 // Too big: call to runtime to allocate old.
1550 __ cmpl(length_reg, Immediate(target::ToRawSmi(max_elements)));
1551 __ j(ABOVE, failure);
1552
1553 NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, failure, EAX));
1554 if (length_reg != EDI) {
1555 __ movl(EDI, length_reg);
1556 }
1557 Label pop_and_fail;
1558 __ pushl(EDI); // Preserve length.
1559 if (cid == kOneByteStringCid) {
1560 __ SmiUntag(EDI);
1561 } else {
1562 // Untag length and multiply by element size -> no-op.
1563 }
1564 const intptr_t fixed_size_plus_alignment_padding =
1565 target::String::InstanceSize() +
1567 __ leal(EDI, Address(EDI, TIMES_1,
1568 fixed_size_plus_alignment_padding)); // EDI is untagged.
1570
1571 __ movl(EAX, Address(THR, target::Thread::top_offset()));
1572 __ movl(EBX, EAX);
1573
1574 // EDI: allocation size.
1575 __ addl(EBX, EDI);
1576 __ j(CARRY, &pop_and_fail);
1577
1578 // Check if the allocation fits into the remaining space.
1579 // EAX: potential new object start.
1580 // EBX: potential next object start.
1581 // EDI: allocation size.
1582 __ cmpl(EBX, Address(THR, target::Thread::end_offset()));
1583 __ j(ABOVE_EQUAL, &pop_and_fail);
1584 __ CheckAllocationCanary(EAX);
1585
1586 // Successfully allocated the object(s), now update top to point to
1587 // next object start and initialize the object.
1588 __ movl(Address(THR, target::Thread::top_offset()), EBX);
1589 __ addl(EAX, Immediate(kHeapObjectTag));
1590 // Clear last double word to ensure string comparison doesn't need to
1591 // specially handle remainder of strings with lengths not factors of double
1592 // offsets.
1593 ASSERT(target::kWordSize == 4);
1594 __ movl(Address(EBX, -1 * target::kWordSize), Immediate(0));
1595 __ movl(Address(EBX, -2 * target::kWordSize), Immediate(0));
1596 // Initialize the tags.
1597 // EAX: new object start as a tagged pointer.
1598 // EBX: new object end address.
1599 // EDI: allocation size.
1600 {
1601 Label size_tag_overflow, done;
1602 __ cmpl(EDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
1603 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
1604 __ shll(EDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
1607
1608 __ Bind(&size_tag_overflow);
1609 __ xorl(EDI, EDI);
1610 __ Bind(&done);
1611
1612 // Get the class index and insert it into the tags.
1613 const uword tags =
1614 target::MakeTagWordForNewSpaceObject(cid, /*instance_size=*/0);
1615 __ orl(EDI, Immediate(tags));
1616 __ movl(FieldAddress(EAX, target::Object::tags_offset()), EDI); // Tags.
1617 }
1618
1619 // Set the length field.
1620 __ popl(EDI);
1621 __ StoreIntoObjectNoBarrier(
1622 EAX, FieldAddress(EAX, target::String::length_offset()), EDI);
1623 // Clear hash.
1624 __ ZeroInitSmiField(FieldAddress(EAX, target::String::hash_offset()));
1626
1627 __ Bind(&pop_and_fail);
1628 __ popl(EDI);
1629 __ jmp(failure);
1630}
1631
1632// Arg0: OneByteString (receiver)
1633// Arg1: Start index as Smi.
1634// Arg2: End index as Smi.
1635// The indexes must be valid.
1636void AsmIntrinsifier::OneByteString_substringUnchecked(Assembler* assembler,
1637 Label* normal_ir_body) {
1638 const intptr_t kStringOffset = 3 * target::kWordSize;
1639 const intptr_t kStartIndexOffset = 2 * target::kWordSize;
1640 const intptr_t kEndIndexOffset = 1 * target::kWordSize;
1641 Label ok;
1642 __ movl(EAX, Address(ESP, +kStartIndexOffset));
1643 __ movl(EDI, Address(ESP, +kEndIndexOffset));
1644 __ orl(EAX, EDI);
1645 __ testl(EAX, Immediate(kSmiTagMask));
1646 __ j(NOT_ZERO, normal_ir_body); // 'start', 'end' not Smi.
1647
1648 __ subl(EDI, Address(ESP, +kStartIndexOffset));
1649 TryAllocateString(assembler, kOneByteStringCid,
1650 target::OneByteString::kMaxNewSpaceElements, &ok,
1651 normal_ir_body, EDI);
1652 __ Bind(&ok);
1653 // EAX: new string as tagged pointer.
1654 // Copy string.
1655 __ movl(EDI, Address(ESP, +kStringOffset));
1656 __ movl(EBX, Address(ESP, +kStartIndexOffset));
1657 __ SmiUntag(EBX);
1658 __ leal(EDI, FieldAddress(EDI, EBX, TIMES_1,
1659 target::OneByteString::data_offset()));
1660 // EDI: Start address to copy from (untagged).
1661 // EBX: Untagged start index.
1662 __ movl(ECX, Address(ESP, +kEndIndexOffset));
1663 __ SmiUntag(ECX);
1664 __ subl(ECX, EBX);
1665 __ xorl(EDX, EDX);
1666 // EDI: Start address to copy from (untagged).
1667 // ECX: Untagged number of bytes to copy.
1668 // EAX: Tagged result string.
1669 // EDX: Loop counter.
1670 // EBX: Scratch register.
1671 Label loop, check;
1673 __ Bind(&loop);
1674 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0));
1675 __ movb(FieldAddress(EAX, EDX, TIMES_1, target::OneByteString::data_offset()),
1676 BL);
1677 __ incl(EDX);
1678 __ Bind(&check);
1679 __ cmpl(EDX, ECX);
1680 __ j(LESS, &loop, Assembler::kNearJump);
1681 __ ret();
1682 __ Bind(normal_ir_body);
1683}
1684
// _setAt for one-byte strings: stores the low byte of the untagged Smi value
// at the untagged Smi index. No checks — callers guarantee validity.
void AsmIntrinsifier::WriteIntoOneByteString(Assembler* assembler,
                                             Label* normal_ir_body) {
  __ movl(ECX, Address(ESP, +1 * target::kWordSize));  // Value.
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));  // Index.
  __ movl(EAX, Address(ESP, +3 * target::kWordSize));  // OneByteString.
  __ SmiUntag(EBX);
  __ SmiUntag(ECX);
  __ movb(FieldAddress(EAX, EBX, TIMES_1, target::OneByteString::data_offset()),
          CL);
  __ ret();
}

// _setAt for two-byte strings: stores the low 16 bits of the untagged Smi
// value. The index stays tagged — the Smi tag (value * 2) is exactly the
// byte offset for 2-byte elements.
void AsmIntrinsifier::WriteIntoTwoByteString(Assembler* assembler,
                                             Label* normal_ir_body) {
  __ movl(ECX, Address(ESP, +1 * target::kWordSize));  // Value.
  __ movl(EBX, Address(ESP, +2 * target::kWordSize));  // Index.
  __ movl(EAX, Address(ESP, +3 * target::kWordSize));  // TwoByteString.
  // Untag index and multiply by element size -> no-op.
  __ SmiUntag(ECX);
  __ movw(FieldAddress(EAX, EBX, TIMES_1, target::TwoByteString::data_offset()),
          ECX);
  __ ret();
}
1708
// Allocates an uninitialized _OneByteString of the given length via the
// inline TryAllocateString fast path; falls back to the runtime on failure.
void AsmIntrinsifier::AllocateOneByteString(Assembler* assembler,
                                            Label* normal_ir_body) {
  __ movl(EDI, Address(ESP, +1 * target::kWordSize));  // Length.
  Label ok;
  TryAllocateString(assembler, kOneByteStringCid,
                    target::OneByteString::kMaxNewSpaceElements, &ok,
                    normal_ir_body, EDI);
  // On success, TryAllocateString leaves the new string (tagged) in EAX.

  __ Bind(&ok);
  __ ret();

  __ Bind(normal_ir_body);
}

// Same as above for _TwoByteString.
void AsmIntrinsifier::AllocateTwoByteString(Assembler* assembler,
                                            Label* normal_ir_body) {
  __ movl(EDI, Address(ESP, +1 * target::kWordSize));  // Length.
  Label ok;
  TryAllocateString(assembler, kTwoByteStringCid,
                    target::TwoByteString::kMaxNewSpaceElements, &ok,
                    normal_ir_body, EDI);
  // On success, TryAllocateString leaves the new string (tagged) in EAX.

  __ Bind(&ok);
  __ ret();

  __ Bind(normal_ir_body);
}
1738
// String == fast paths: both load the receiver and argument and defer to the
// shared StringEquality helper, parameterized by element cid.
void AsmIntrinsifier::OneByteString_equality(Assembler* assembler,
                                             Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // This.
  __ movl(EBX, Address(ESP, +1 * target::kWordSize));  // Other.

  StringEquality(assembler, EAX, EBX, EDI, ECX, EAX, normal_ir_body,
                 kOneByteStringCid);
}

void AsmIntrinsifier::TwoByteString_equality(Assembler* assembler,
                                             Label* normal_ir_body) {
  __ movl(EAX, Address(ESP, +2 * target::kWordSize));  // This.
  __ movl(EBX, Address(ESP, +1 * target::kWordSize));  // Other.

  StringEquality(assembler, EAX, EBX, EDI, ECX, EAX, normal_ir_body,
                 kTwoByteStringCid);
}
1756
// Tail-calls the RegExp matcher function specialized for the subject string's
// class. Emits nothing (no intrinsic) when irregexp is interpreted.
void AsmIntrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler,
                                                   Label* normal_ir_body,
                                                   bool sticky) {
  if (FLAG_interpret_irregexp) return;

  const intptr_t kRegExpParamOffset = 3 * target::kWordSize;
  const intptr_t kStringParamOffset = 2 * target::kWordSize;
  // start_index smi is located at offset 1.

  // Incoming registers:
  // EAX: Function. (Will be loaded with the specialized matcher function.)
  // ECX: Unknown. (Must be GC safe on tail call.)
  // EDX: Arguments descriptor. (Will be preserved.)

  // Load the specialized function pointer into EAX. Leverage the fact the
  // string CIDs as well as stored function pointers are in sequence.
  __ movl(EBX, Address(ESP, kRegExpParamOffset));
  __ movl(EDI, Address(ESP, kStringParamOffset));
  __ LoadClassId(EDI, EDI);
  // EDI becomes the zero-based index of the string class, scaled into the
  // RegExp object's per-cid function table below.
  __ SubImmediate(EDI, Immediate(kOneByteStringCid));
  __ movl(FUNCTION_REG, FieldAddress(EBX, EDI, TIMES_4,
                                     target::RegExp::function_offset(
                                         kOneByteStringCid, sticky)));

  // Registers are now set up for the lazy compile stub. It expects the function
  // in EAX, the argument descriptor in EDX, and IC-Data in ECX.
  __ xorl(ECX, ECX);

  // Tail-call the function.
  __ jmp(FieldAddress(FUNCTION_REG, target::Function::entry_point_offset()));
}
1788
// Returns the isolate's default UserTag object.
void AsmIntrinsifier::UserTag_defaultTag(Assembler* assembler,
                                         Label* normal_ir_body) {
  __ LoadIsolate(EAX);
  __ movl(EAX, Address(EAX, target::Isolate::default_tag_offset()));
  __ ret();
}

// Returns the isolate's currently active UserTag object.
void AsmIntrinsifier::Profiler_getCurrentTag(Assembler* assembler,
                                             Label* normal_ir_body) {
  __ LoadIsolate(EAX);
  __ movl(EAX, Address(EAX, target::Isolate::current_tag_offset()));
  __ ret();
}
1802
// Returns whether the Dart timeline stream is currently enabled. Compiled
// out to a constant false when the build has no timeline support.
void AsmIntrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler,
                                                   Label* normal_ir_body) {
#if !defined(SUPPORT_TIMELINE)
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
#else
  Label true_label;
  // Load TimelineStream*.
  __ movl(EAX, Address(THR, target::Thread::dart_stream_offset()));
  // Load uintptr_t from TimelineStream*.
  __ movl(EAX, Address(EAX, target::TimelineStream::enabled_offset()));
  __ cmpl(EAX, Immediate(0));
  __ j(NOT_ZERO, &true_label, Assembler::kNearJump);
  // Not enabled.
  __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
  __ ret();
  // Enabled.
  __ Bind(&true_label);
  __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
  __ ret();
#endif
}
1825
// Returns the thread's next timeline task id as a Smi and post-increments
// the stored 64-bit counter. The 64-bit increment is done as two 32-bit
// operations (addl + adcl) since IA32 has no 64-bit GP registers; only the
// low word ends up in the returned Smi.
void AsmIntrinsifier::Timeline_getNextTaskId(Assembler* assembler,
                                             Label* normal_ir_body) {
#if !defined(SUPPORT_TIMELINE)
  __ LoadImmediate(EAX, target::ToRawSmi(0));
  __ ret();
#else
  __ movl(EBX, Address(THR, target::Thread::next_task_id_offset()));
  __ movl(ECX, Address(THR, target::Thread::next_task_id_offset() + 4));
  __ movl(EAX, EBX);
  __ SmiTag(EAX);  // Ignore loss of precision.
  __ addl(EBX, Immediate(1));
  __ adcl(ECX, Immediate(0));
  __ movl(Address(THR, target::Thread::next_task_id_offset()), EBX);
  __ movl(Address(THR, target::Thread::next_task_id_offset() + 4), ECX);
  __ ret();
#endif
}
1843
1844#undef __
1845
1846} // namespace compiler
1847} // namespace dart
1848
1849#endif // defined(TARGET_ARCH_IA32)
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
Definition DM.cpp:263
static bool equal(const SkBitmap &a, const SkBitmap &b)
#define check(reporter, ref, unref, make, kill)
static bool ok(int result)
#define __
#define UNREACHABLE()
Definition assert.h:248
#define COMPILE_ASSERT(expr)
Definition assert.h:339
#define ASSERT(E)
uword MakeTagWordForNewSpaceObject(classid_t cid, uword instance_size)
word ToRawSmi(const dart::Object &a)
const Bool & TrueObject()
const Bool & FalseObject()
const Object & NullObject()
const Class & DoubleClass()
const Class & MintClass()
const Register THR
static bool CompareIntegers(Token::Kind kind, const Integer &left, const Integer &right)
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
Definition hash.h:12
int32_t classid_t
Definition globals.h:524
@ kNumPredefinedCids
Definition class_id.h:257
@ kHeapObjectTag
uintptr_t uword
Definition globals.h:501
@ GREATER_EQUAL
@ NOT_CARRY
@ BELOW_EQUAL
@ ABOVE_EQUAL
@ PARITY_EVEN
@ kNoRegister
const intptr_t cid
const Register FUNCTION_REG
const intptr_t kBytesPerBigIntDigit
Definition globals.h:54
SIN Vec< N, uint16_t > mull(const Vec< N, uint8_t > &x, const Vec< N, uint8_t > &y)
Definition SkVx.h:906
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kObjectAlignment
#define NOT_IN_PRODUCT(code)
Definition globals.h:84