asm_intrinsifier_ia32.cc
1// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4//
5// The intrinsic code below is executed before a method has built its frame.
6// The return address is on the stack and the arguments below it.
7// Registers EDX (arguments descriptor) and ECX (function) must be preserved.
8// Each intrinsification method returns true if the corresponding
9// Dart method was intrinsified.
10
11#include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32.
12#if defined(TARGET_ARCH_IA32)
13
14#define SHOULD_NOT_INCLUDE_RUNTIME
15
16#include "vm/class_id.h"
17#include "vm/compiler/asm_intrinsifier.h"
18#include "vm/compiler/assembler/assembler.h"
19
20namespace dart {
21namespace compiler {
22
23// When entering intrinsics code:
24// ECX: IC Data
25// EDX: Arguments descriptor
26// TOS: Return address
27// The ECX, EDX registers can be destroyed only if there is no slow-path, i.e.
28// if the intrinsified method always executes a return.
29// The EBP register should not be modified, because it is used by the profiler.
30// The THR register (see constants_ia32.h) must be preserved.
31
32#define __ assembler->
33
34// Tests if the two topmost arguments are Smis; jumps to label not_smi if not.
35// Topmost argument is in EAX.
36static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) {
37 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
38 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
39 __ orl(EBX, EAX);
40 __ testl(EBX, Immediate(kSmiTagMask));
41 __ j(NOT_ZERO, not_smi, Assembler::kNearJump);
42}
43
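// Note: since a Smi has a zero tag bit, ORing the two tagged values and
// testing kSmiTagMask checks both arguments with a single branch; the OR has
// its tag bit set iff at least one argument is not a Smi.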
44void AsmIntrinsifier::Integer_shl(Assembler* assembler, Label* normal_ir_body) {
45 ASSERT(kSmiTagShift == 1);
46 ASSERT(kSmiTag == 0);
47 Label overflow;
48 TestBothArgumentsSmis(assembler, normal_ir_body);
49 // Shift value is in EAX. Compare with tagged Smi.
50 __ cmpl(EAX, Immediate(target::ToRawSmi(target::kSmiBits)));
51 __ j(ABOVE_EQUAL, normal_ir_body, Assembler::kNearJump);
52
53 __ SmiUntag(EAX);
54 __ movl(ECX, EAX); // Shift amount must be in ECX.
55 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // Value.
56
57 // Overflow test - all the shifted-out bits must be the same as the sign bit.
58 __ movl(EBX, EAX);
59 __ shll(EAX, ECX);
60 __ sarl(EAX, ECX);
61 __ cmpl(EAX, EBX);
62 __ j(NOT_EQUAL, &overflow, Assembler::kNearJump);
63
64 __ shll(EAX, ECX); // Shift for result now we know there is no overflow.
65
66 // EAX is a correctly tagged Smi.
67 __ ret();
68
69 __ Bind(&overflow);
70 // Arguments are Smi but the shift produced an overflow to Mint.
71 __ cmpl(EBX, Immediate(0));
72 // TODO(srdjan): Implement negative values, for now fall through.
73 __ j(LESS, normal_ir_body, Assembler::kNearJump);
74 __ SmiUntag(EBX);
75 __ movl(EAX, EBX);
76 __ shll(EBX, ECX);
77 __ xorl(EDI, EDI);
78 __ shldl(EDI, EAX, ECX);
79 // Result in EDI (high) and EBX (low).
80 const Class& mint_class = MintClass();
81 __ TryAllocate(mint_class, normal_ir_body, Assembler::kNearJump,
82 EAX, // Result register.
83 ECX); // temp
84 // EBX and EDI are not objects but integer values.
85 __ movl(FieldAddress(EAX, target::Mint::value_offset()), EBX);
86 __ movl(FieldAddress(EAX, target::Mint::value_offset() + target::kWordSize),
87 EDI);
88 __ ret();
89 __ Bind(normal_ir_body);
90}
91
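// Note on Integer_shl above: shifting left and then arithmetic-shifting right
// by the same amount reproduces the original value only if no significant
// bits (including the sign) were lost, which is the overflow test. On
// overflow the 64-bit result is boxed as a Mint, stored as two 32-bit words
// (low word first) at Mint::value_offset().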
92static void Push64SmiOrMint(Assembler* assembler,
93 Register reg,
94 Register tmp,
95 Label* not_smi_or_mint) {
96 Label not_smi, done;
97 __ testl(reg, Immediate(kSmiTagMask));
98 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
99 __ SmiUntag(reg);
100 // Sign extend to 64 bit
101 __ movl(tmp, reg);
102 __ sarl(tmp, Immediate(31));
103 __ pushl(tmp);
104 __ pushl(reg);
105 __ jmp(&done);
106 __ Bind(&not_smi);
107 __ CompareClassId(reg, kMintCid, tmp);
108 __ j(NOT_EQUAL, not_smi_or_mint);
109 // Mint.
110 __ pushl(FieldAddress(reg, target::Mint::value_offset() + target::kWordSize));
111 __ pushl(FieldAddress(reg, target::Mint::value_offset()));
112 __ Bind(&done);
113}
114
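// Note: in the Smi case above, sarl by 31 broadcasts the sign bit into the
// temporary register, yielding the high word of the sign-extended 64-bit
// value; pushing the high word and then the low word leaves the low word on
// top of the stack.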
115static void CompareIntegers(Assembler* assembler,
116 Label* normal_ir_body,
117 Condition true_condition) {
118 Label try_mint_smi, is_true, is_false, drop_two_fall_through, fall_through;
119 TestBothArgumentsSmis(assembler, &try_mint_smi);
120 // EAX contains the right argument.
121 __ cmpl(Address(ESP, +2 * target::kWordSize), EAX);
122 __ j(true_condition, &is_true, Assembler::kNearJump);
123 __ Bind(&is_false);
124 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
125 __ ret();
126 __ Bind(&is_true);
127 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
128 __ ret();
129
130 // 64-bit comparison
131 Condition hi_true_cond, hi_false_cond, lo_false_cond;
132 switch (true_condition) {
133 case LESS:
134 case LESS_EQUAL:
135 hi_true_cond = LESS;
136 hi_false_cond = GREATER;
137 lo_false_cond = (true_condition == LESS) ? ABOVE_EQUAL : ABOVE;
138 break;
139 case GREATER:
140 case GREATER_EQUAL:
141 hi_true_cond = GREATER;
142 hi_false_cond = LESS;
143 lo_false_cond = (true_condition == GREATER) ? BELOW_EQUAL : BELOW;
144 break;
145 default:
146 UNREACHABLE();
147 hi_true_cond = hi_false_cond = lo_false_cond = OVERFLOW;
148 }
149 __ Bind(&try_mint_smi);
150 // Note that EDX and ECX must be preserved in case we fall through to the
151 // main method.
152 // EAX contains the right argument.
153 __ movl(EBX, Address(ESP, +2 * target::kWordSize)); // Left argument.
154 // Push left as 64 bit integer.
155 Push64SmiOrMint(assembler, EBX, EDI, normal_ir_body);
156 // Push right as 64 bit integer.
157 Push64SmiOrMint(assembler, EAX, EDI, &drop_two_fall_through);
158 __ popl(EBX); // Right.LO.
159 __ popl(ECX); // Right.HI.
160 __ popl(EAX); // Left.LO.
161 __ popl(EDX); // Left.HI.
162 __ cmpl(EDX, ECX); // cmpl left.HI, right.HI.
163 __ j(hi_false_cond, &is_false, Assembler::kNearJump);
164 __ j(hi_true_cond, &is_true, Assembler::kNearJump);
165 __ cmpl(EAX, EBX); // cmpl left.LO, right.LO.
166 __ j(lo_false_cond, &is_false, Assembler::kNearJump);
167 // Else is true.
168 __ jmp(&is_true);
169
170 __ Bind(&drop_two_fall_through);
171 __ Drop(2);
172 __ Bind(normal_ir_body);
173}
174
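// Note: the 64-bit slow path in CompareIntegers compares the high words with
// signed conditions and only compares the low words (unsigned) when the high
// words are equal -- the standard two-word signed comparison.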
175void AsmIntrinsifier::Integer_lessThan(Assembler* assembler,
176 Label* normal_ir_body) {
177 CompareIntegers(assembler, normal_ir_body, LESS);
178}
179
180void AsmIntrinsifier::Integer_greaterThan(Assembler* assembler,
181 Label* normal_ir_body) {
182 CompareIntegers(assembler, normal_ir_body, GREATER);
183}
184
185void AsmIntrinsifier::Integer_lessEqualThan(Assembler* assembler,
186 Label* normal_ir_body) {
187 CompareIntegers(assembler, normal_ir_body, LESS_EQUAL);
188}
189
190void AsmIntrinsifier::Integer_greaterEqualThan(Assembler* assembler,
191 Label* normal_ir_body) {
192 CompareIntegers(assembler, normal_ir_body, GREATER_EQUAL);
193}
194
195// This is called for Smi and Mint receivers. The right argument
196// can be Smi, Mint or double.
197void AsmIntrinsifier::Integer_equalToInteger(Assembler* assembler,
198 Label* normal_ir_body) {
199 Label true_label, check_for_mint;
200 // For integer receiver '===' check first.
201 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
202 __ cmpl(EAX, Address(ESP, +2 * target::kWordSize));
203 __ j(EQUAL, &true_label, Assembler::kNearJump);
204 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
205 __ orl(EAX, EBX);
206 __ testl(EAX, Immediate(kSmiTagMask));
207 __ j(NOT_ZERO, &check_for_mint, Assembler::kNearJump);
208 // Both arguments are smi, '===' is good enough.
209 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
210 __ ret();
211 __ Bind(&true_label);
212 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
213 __ ret();
214
215 // At least one of the arguments was not Smi.
216 Label receiver_not_smi;
217 __ Bind(&check_for_mint);
218 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // Receiver.
219 __ testl(EAX, Immediate(kSmiTagMask));
220 __ j(NOT_ZERO, &receiver_not_smi);
221
222 // Left (receiver) is Smi, return false if right is not Double.
223 // Note that an instance of Mint never contains a value that can be
224 // represented by Smi.
225 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // Right argument.
226 __ CompareClassId(EAX, kDoubleCid, EDI);
227 __ j(EQUAL, normal_ir_body);
228 __ LoadObject(EAX,
229 CastHandle<Object>(FalseObject())); // Smi == Mint -> false.
230 __ ret();
231
232 __ Bind(&receiver_not_smi);
233 // EAX: receiver.
234 __ CompareClassId(EAX, kMintCid, EDI);
235 __ j(NOT_EQUAL, normal_ir_body);
236 // Receiver is Mint, return false if right is Smi.
237 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // Right argument.
238 __ testl(EAX, Immediate(kSmiTagMask));
239 __ j(NOT_ZERO, normal_ir_body);
240 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
241 __ ret();
242 // TODO(srdjan): Implement Mint == Mint comparison.
243
244 __ Bind(normal_ir_body);
245}
246
247void AsmIntrinsifier::Integer_equal(Assembler* assembler,
248 Label* normal_ir_body) {
249 Integer_equalToInteger(assembler, normal_ir_body);
250}
251
252// Argument is Smi (receiver).
253void AsmIntrinsifier::Smi_bitLength(Assembler* assembler,
254 Label* normal_ir_body) {
255 ASSERT(kSmiTagShift == 1);
256 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // Receiver.
257 // XOR with sign bit to complement bits if value is negative.
258 __ movl(ECX, EAX);
259 __ sarl(ECX, Immediate(31)); // All 0 or all 1.
260 __ xorl(EAX, ECX);
261 // BSR does not write the destination register if source is zero. Put a 1 in
262 // the Smi tag bit to ensure BSR writes to destination register.
263 __ orl(EAX, Immediate(kSmiTagMask));
264 __ bsrl(EAX, EAX);
265 __ SmiTag(EAX);
266 __ ret();
267}
268
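// Note: bsrl yields the index of the highest set bit. Because a Smi is the
// value shifted left by one (kSmiTagShift == 1), bsr of the tagged value is
// already bitLength(value); ORing in the tag bit first guarantees a non-zero
// input, since bsr leaves its destination undefined for zero.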
269void AsmIntrinsifier::Bigint_lsh(Assembler* assembler, Label* normal_ir_body) {
270 // static void _lsh(Uint32List x_digits, int x_used, int n,
271 // Uint32List r_digits)
272
273 // Preserve THR to free ESI.
274 __ pushl(THR);
275 ASSERT(THR == ESI);
276
277 __ movl(EDI, Address(ESP, 5 * target::kWordSize)); // x_digits
278 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // n is Smi
279 __ SmiUntag(ECX);
280 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
281 __ movl(ESI, ECX);
282 __ sarl(ESI, Immediate(5)); // ESI = n ~/ _DIGIT_BITS.
283 __ leal(EBX, FieldAddress(EBX, ESI, TIMES_4,
284 target::TypedData::payload_offset()));
285 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // x_used > 0, Smi.
286 __ SmiUntag(ESI);
287 __ decl(ESI);
288 __ xorl(EAX, EAX); // EAX = 0.
289 __ movl(EDX, FieldAddress(EDI, ESI, TIMES_4,
290 target::TypedData::payload_offset()));
291 __ shldl(EAX, EDX, ECX);
292 __ movl(Address(EBX, ESI, TIMES_4, kBytesPerBigIntDigit), EAX);
293 Label last;
294 __ cmpl(ESI, Immediate(0));
295 __ j(EQUAL, &last, Assembler::kNearJump);
296 Label loop;
297 __ Bind(&loop);
298 __ movl(EAX, EDX);
299 __ movl(EDX, FieldAddress(
300 EDI, ESI, TIMES_4,
301 target::TypedData::payload_offset() - kBytesPerBigIntDigit));
302 __ shldl(EAX, EDX, ECX);
303 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX);
304 __ decl(ESI);
305 __ j(NOT_ZERO, &loop, Assembler::kNearJump);
306 __ Bind(&last);
307 __ shldl(EDX, ESI, ECX); // ESI == 0.
308 __ movl(Address(EBX, 0), EDX);
309
310 // Restore THR and return.
311 __ popl(THR);
312 __ LoadObject(EAX, NullObject());
313 __ ret();
314}
315
316void AsmIntrinsifier::Bigint_rsh(Assembler* assembler, Label* normal_ir_body) {
317 // static void _rsh(Uint32List x_digits, int x_used, int n,
318 // Uint32List r_digits)
319
320 // Preserve THR to free ESI.
321 __ pushl(THR);
322 ASSERT(THR == ESI);
323
324 __ movl(EDI, Address(ESP, 5 * target::kWordSize)); // x_digits
325 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // n is Smi
326 __ SmiUntag(ECX);
327 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
328 __ movl(EDX, ECX);
329 __ sarl(EDX, Immediate(5)); // EDX = n ~/ _DIGIT_BITS.
330 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // x_used > 0, Smi.
331 __ SmiUntag(ESI);
332 __ decl(ESI);
333 // EDI = &x_digits[x_used - 1].
334 __ leal(EDI, FieldAddress(EDI, ESI, TIMES_4,
335 target::TypedData::payload_offset()));
336 __ subl(ESI, EDX);
337 // EBX = &r_digits[x_used - 1 - (n ~/ 32)].
338 __ leal(EBX, FieldAddress(EBX, ESI, TIMES_4,
339 target::TypedData::payload_offset()));
340 __ negl(ESI);
341 __ movl(EDX, Address(EDI, ESI, TIMES_4, 0));
342 Label last;
343 __ cmpl(ESI, Immediate(0));
344 __ j(EQUAL, &last, Assembler::kNearJump);
345 Label loop;
346 __ Bind(&loop);
347 __ movl(EAX, EDX);
348 __ movl(EDX, Address(EDI, ESI, TIMES_4, kBytesPerBigIntDigit));
349 __ shrdl(EAX, EDX, ECX);
350 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX);
351 __ incl(ESI);
352 __ j(NOT_ZERO, &loop, Assembler::kNearJump);
353 __ Bind(&last);
354 __ shrdl(EDX, ESI, ECX); // ESI == 0.
355 __ movl(Address(EBX, 0), EDX);
356
357 // Restore THR and return.
358 __ popl(THR);
359 __ LoadObject(EAX, NullObject());
360 __ ret();
361}
362
363void AsmIntrinsifier::Bigint_absAdd(Assembler* assembler,
364 Label* normal_ir_body) {
365 // static void _absAdd(Uint32List digits, int used,
366 // Uint32List a_digits, int a_used,
367 // Uint32List r_digits)
368
369 // Preserve THR to free ESI.
370 __ pushl(THR);
371 ASSERT(THR == ESI);
372
373 __ movl(EDI, Address(ESP, 6 * target::kWordSize)); // digits
374 __ movl(EAX, Address(ESP, 5 * target::kWordSize)); // used is Smi
375 __ SmiUntag(EAX); // used > 0.
376 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // a_digits
377 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // a_used is Smi
378 __ SmiUntag(ECX); // a_used > 0.
379 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
380
381 // Precompute 'used - a_used' now so that carry flag is not lost later.
382 __ subl(EAX, ECX);
383 __ incl(EAX); // To account for the extra test between loops.
384 __ pushl(EAX);
385
386 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0.
387 Label add_loop;
388 __ Bind(&add_loop);
389 // Loop a_used times, ECX = a_used, ECX > 0.
390 __ movl(EAX,
391 FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
392 __ adcl(EAX,
393 FieldAddress(ESI, EDX, TIMES_4, target::TypedData::payload_offset()));
394 __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
395 EAX);
396 __ incl(EDX); // Does not affect carry flag.
397 __ decl(ECX); // Does not affect carry flag.
398 __ j(NOT_ZERO, &add_loop, Assembler::kNearJump);
399
400 Label last_carry;
401 __ popl(ECX);
402 __ decl(ECX); // Does not affect carry flag.
403 __ j(ZERO, &last_carry, Assembler::kNearJump); // If used - a_used == 0.
404
405 Label carry_loop;
406 __ Bind(&carry_loop);
407 // Loop used - a_used times, ECX = used - a_used, ECX > 0.
408 __ movl(EAX,
409 FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
410 __ adcl(EAX, Immediate(0));
411 __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
412 EAX);
413 __ incl(EDX); // Does not affect carry flag.
414 __ decl(ECX); // Does not affect carry flag.
415 __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);
416
417 __ Bind(&last_carry);
418 __ movl(EAX, Immediate(0));
419 __ adcl(EAX, Immediate(0));
420 __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
421 EAX);
422
423 // Restore THR and return.
424 __ popl(THR);
425 __ LoadObject(EAX, NullObject());
426 __ ret();
427}
428
429void AsmIntrinsifier::Bigint_absSub(Assembler* assembler,
430 Label* normal_ir_body) {
431 // static void _absSub(Uint32List digits, int used,
432 // Uint32List a_digits, int a_used,
433 // Uint32List r_digits)
434
435 // Preserve THR to free ESI.
436 __ pushl(THR);
437 ASSERT(THR == ESI);
438
439 __ movl(EDI, Address(ESP, 6 * target::kWordSize)); // digits
440 __ movl(EAX, Address(ESP, 5 * target::kWordSize)); // used is Smi
441 __ SmiUntag(EAX); // used > 0.
442 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // a_digits
443 __ movl(ECX, Address(ESP, 3 * target::kWordSize)); // a_used is Smi
444 __ SmiUntag(ECX); // a_used > 0.
445 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // r_digits
446
447 // Precompute 'used - a_used' now so that carry flag is not lost later.
448 __ subl(EAX, ECX);
449 __ incl(EAX); // To account for the extra test between loops.
450 __ pushl(EAX);
451
452 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0.
453 Label sub_loop;
454 __ Bind(&sub_loop);
455 // Loop a_used times, ECX = a_used, ECX > 0.
456 __ movl(EAX,
457 FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
458 __ sbbl(EAX,
459 FieldAddress(ESI, EDX, TIMES_4, target::TypedData::payload_offset()));
460 __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
461 EAX);
462 __ incl(EDX); // Does not affect carry flag.
463 __ decl(ECX); // Does not affect carry flag.
464 __ j(NOT_ZERO, &sub_loop, Assembler::kNearJump);
465
466 Label done;
467 __ popl(ECX);
468 __ decl(ECX); // Does not affect carry flag.
469 __ j(ZERO, &done, Assembler::kNearJump); // If used - a_used == 0.
470
471 Label carry_loop;
472 __ Bind(&carry_loop);
473 // Loop used - a_used times, ECX = used - a_used, ECX > 0.
474 __ movl(EAX,
475 FieldAddress(EDI, EDX, TIMES_4, target::TypedData::payload_offset()));
476 __ sbbl(EAX, Immediate(0));
477 __ movl(FieldAddress(EBX, EDX, TIMES_4, target::TypedData::payload_offset()),
478 EAX);
479 __ incl(EDX); // Does not affect carry flag.
480 __ decl(ECX); // Does not affect carry flag.
481 __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);
482
483 __ Bind(&done);
484 // Restore THR and return.
485 __ popl(THR);
486 __ LoadObject(EAX, NullObject());
487 __ ret();
488}
489
490void AsmIntrinsifier::Bigint_mulAdd(Assembler* assembler,
491 Label* normal_ir_body) {
492 // Pseudo code:
493 // static int _mulAdd(Uint32List x_digits, int xi,
494 // Uint32List m_digits, int i,
495 // Uint32List a_digits, int j, int n) {
496 // uint32_t x = x_digits[xi >> 1]; // xi is Smi.
497 // if (x == 0 || n == 0) {
498 // return 1;
499 // }
500 // uint32_t* mip = &m_digits[i >> 1]; // i is Smi.
501 // uint32_t* ajp = &a_digits[j >> 1]; // j is Smi.
502 // uint32_t c = 0;
503 // SmiUntag(n);
504 // do {
505 // uint32_t mi = *mip++;
506 // uint32_t aj = *ajp;
507 // uint64_t t = x*mi + aj + c; // 32-bit * 32-bit -> 64-bit.
508 // *ajp++ = low32(t);
509 // c = high32(t);
510 // } while (--n > 0);
511 // while (c != 0) {
512 // uint64_t t = *ajp + c;
513 // *ajp++ = low32(t);
514 // c = high32(t); // c == 0 or 1.
515 // }
516 // return 1;
517 // }
518
519 Label no_op;
520 // EBX = x, no_op if x == 0
521 __ movl(ECX, Address(ESP, 7 * target::kWordSize)); // x_digits
522 __ movl(EAX, Address(ESP, 6 * target::kWordSize)); // xi is Smi
523 __ movl(EBX,
524 FieldAddress(ECX, EAX, TIMES_2, target::TypedData::payload_offset()));
525 __ testl(EBX, EBX);
526 __ j(ZERO, &no_op, Assembler::kNearJump);
527
528 // EDX = SmiUntag(n), no_op if n == 0
529 __ movl(EDX, Address(ESP, 1 * target::kWordSize));
530 __ SmiUntag(EDX);
531 __ j(ZERO, &no_op, Assembler::kNearJump);
532
533 // Preserve THR to free ESI.
534 __ pushl(THR);
535 ASSERT(THR == ESI);
536
537 // EDI = mip = &m_digits[i >> 1]
538 __ movl(EDI, Address(ESP, 6 * target::kWordSize)); // m_digits
539 __ movl(EAX, Address(ESP, 5 * target::kWordSize)); // i is Smi
540 __ leal(EDI,
541 FieldAddress(EDI, EAX, TIMES_2, target::TypedData::payload_offset()));
542
543 // ESI = ajp = &a_digits[j >> 1]
544 __ movl(ESI, Address(ESP, 4 * target::kWordSize)); // a_digits
545 __ movl(EAX, Address(ESP, 3 * target::kWordSize)); // j is Smi
546 __ leal(ESI,
547 FieldAddress(ESI, EAX, TIMES_2, target::TypedData::payload_offset()));
548
549 // Save n
550 __ pushl(EDX);
551 Address n_addr = Address(ESP, 0 * target::kWordSize);
552
553 // ECX = c = 0
554 __ xorl(ECX, ECX);
555
556 Label muladd_loop;
557 __ Bind(&muladd_loop);
558 // x: EBX
559 // mip: EDI
560 // ajp: ESI
561 // c: ECX
562 // t: EDX:EAX (not live at loop entry)
563 // n: ESP[0]
564
565 // uint32_t mi = *mip++
566 __ movl(EAX, Address(EDI, 0));
567 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
568
569 // uint64_t t = x*mi
570 __ mull(EBX); // t = EDX:EAX = EAX * EBX
571 __ addl(EAX, ECX); // t += c
572 __ adcl(EDX, Immediate(0));
573
574 // uint32_t aj = *ajp; t += aj
575 __ addl(EAX, Address(ESI, 0));
576 __ adcl(EDX, Immediate(0));
577
578 // *ajp++ = low32(t)
579 __ movl(Address(ESI, 0), EAX);
580 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
581
582 // c = high32(t)
583 __ movl(ECX, EDX);
584
585 // while (--n > 0)
586 __ decl(n_addr); // --n
587 __ j(NOT_ZERO, &muladd_loop, Assembler::kNearJump);
588
589 Label done;
590 __ testl(ECX, ECX);
591 __ j(ZERO, &done, Assembler::kNearJump);
592
593 // *ajp += c
594 __ addl(Address(ESI, 0), ECX);
595 __ j(NOT_CARRY, &done, Assembler::kNearJump);
596
597 Label propagate_carry_loop;
598 __ Bind(&propagate_carry_loop);
599 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
600 __ incl(Address(ESI, 0)); // c == 0 or 1
601 __ j(CARRY, &propagate_carry_loop, Assembler::kNearJump);
602
603 __ Bind(&done);
604 __ Drop(1); // n
605 // Restore THR and return.
606 __ popl(THR);
607
608 __ Bind(&no_op);
609 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
610 __ ret();
611}
612
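// Note: in the multiply-add loop above, mull leaves the 64-bit product in
// EDX:EAX; the following addl/adcl pairs fold in the previous carry and the
// current digit so that t = x*mi + aj + c keeps its high 32 bits, which
// become the carry for the next iteration.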
613void AsmIntrinsifier::Bigint_sqrAdd(Assembler* assembler,
614 Label* normal_ir_body) {
615 // Pseudo code:
616 // static int _sqrAdd(Uint32List x_digits, int i,
617 // Uint32List a_digits, int used) {
618 // uint32_t* xip = &x_digits[i >> 1]; // i is Smi.
619 // uint32_t x = *xip++;
620 // if (x == 0) return 1;
621 // uint32_t* ajp = &a_digits[i]; // j == 2*i, i is Smi.
622 // uint32_t aj = *ajp;
623 // uint64_t t = x*x + aj;
624 // *ajp++ = low32(t);
625 // uint64_t c = high32(t);
626 // int n = ((used - i) >> 1) - 1; // used and i are Smi.
627 // while (--n >= 0) {
628 // uint32_t xi = *xip++;
629 // uint32_t aj = *ajp;
630 // uint96_t t = 2*x*xi + aj + c; // 2-bit * 32-bit * 32-bit -> 65-bit.
631 // *ajp++ = low32(t);
632 // c = high64(t); // 33-bit.
633 // }
634 // uint32_t aj = *ajp;
635 // uint64_t t = aj + c; // 32-bit + 33-bit -> 34-bit.
636 // *ajp++ = low32(t);
637 // *ajp = high32(t);
638 // return 1;
639 // }
640
641 // EDI = xip = &x_digits[i >> 1]
642 __ movl(EDI, Address(ESP, 4 * target::kWordSize)); // x_digits
643 __ movl(EAX, Address(ESP, 3 * target::kWordSize)); // i is Smi
644 __ leal(EDI,
645 FieldAddress(EDI, EAX, TIMES_2, target::TypedData::payload_offset()));
646
647 // EBX = x = *xip++, return if x == 0
648 Label x_zero;
649 __ movl(EBX, Address(EDI, 0));
650 __ cmpl(EBX, Immediate(0));
651 __ j(EQUAL, &x_zero, Assembler::kNearJump);
652 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
653
654 // Preserve THR to free ESI.
655 __ pushl(THR);
656 ASSERT(THR == ESI);
657
658 // ESI = ajp = &a_digits[i]
659 __ movl(ESI, Address(ESP, 3 * target::kWordSize)); // a_digits
660 __ leal(ESI,
661 FieldAddress(ESI, EAX, TIMES_4, target::TypedData::payload_offset()));
662
663 // EDX:EAX = t = x*x + *ajp
664 __ movl(EAX, EBX);
665 __ mull(EBX);
666 __ addl(EAX, Address(ESI, 0));
667 __ adcl(EDX, Immediate(0));
668
669 // *ajp++ = low32(t)
670 __ movl(Address(ESI, 0), EAX);
671 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
672
673 // int n = used - i - 1
674 __ movl(EAX, Address(ESP, 2 * target::kWordSize)); // used is Smi
675 __ subl(EAX, Address(ESP, 4 * target::kWordSize)); // i is Smi
676 __ SmiUntag(EAX);
677 __ decl(EAX);
678 __ pushl(EAX); // Save n on stack.
679
680 // uint64_t c = high32(t)
681 __ pushl(Immediate(0)); // push high32(c) == 0
682 __ pushl(EDX); // push low32(c) == high32(t)
683
684 Address n_addr = Address(ESP, 2 * target::kWordSize);
685 Address ch_addr = Address(ESP, 1 * target::kWordSize);
686 Address cl_addr = Address(ESP, 0 * target::kWordSize);
687
688 Label loop, done;
689 __ Bind(&loop);
690 // x: EBX
691 // xip: EDI
692 // ajp: ESI
693 // c: ESP[1]:ESP[0]
694 // t: ECX:EDX:EAX (not live at loop entry)
695 // n: ESP[2]
696
697 // while (--n >= 0)
698 __ decl(Address(ESP, 2 * target::kWordSize)); // --n
699 __ j(NEGATIVE, &done, Assembler::kNearJump);
700
701 // uint32_t xi = *xip++
702 __ movl(EAX, Address(EDI, 0));
703 __ addl(EDI, Immediate(kBytesPerBigIntDigit));
704
705 // uint96_t t = ECX:EDX:EAX = 2*x*xi + aj + c
706 __ mull(EBX); // EDX:EAX = EAX * EBX
707 __ xorl(ECX, ECX); // ECX = 0
708 __ shldl(ECX, EDX, Immediate(1));
709 __ shldl(EDX, EAX, Immediate(1));
710 __ shll(EAX, Immediate(1)); // ECX:EDX:EAX <<= 1
711 __ addl(EAX, Address(ESI, 0)); // t += aj
712 __ adcl(EDX, Immediate(0));
713 __ adcl(ECX, Immediate(0));
714 __ addl(EAX, cl_addr); // t += low32(c)
715 __ adcl(EDX, ch_addr); // t += high32(c) << 32
716 __ adcl(ECX, Immediate(0));
717
718 // *ajp++ = low32(t)
719 __ movl(Address(ESI, 0), EAX);
720 __ addl(ESI, Immediate(kBytesPerBigIntDigit));
721
722 // c = high64(t)
723 __ movl(cl_addr, EDX);
724 __ movl(ch_addr, ECX);
725
726 __ jmp(&loop, Assembler::kNearJump);
727
728 __ Bind(&done);
729 // uint64_t t = aj + c
730 __ movl(EAX, cl_addr); // t = c
731 __ movl(EDX, ch_addr);
732 __ addl(EAX, Address(ESI, 0)); // t += *ajp
733 __ adcl(EDX, Immediate(0));
734
735 // *ajp++ = low32(t)
736 // *ajp = high32(t)
737 __ movl(Address(ESI, 0), EAX);
738 __ movl(Address(ESI, kBytesPerBigIntDigit), EDX);
739
740 // Restore THR and return.
741 __ Drop(3);
742 __ popl(THR);
743 __ Bind(&x_zero);
744 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
745 __ ret();
746}
747
748void AsmIntrinsifier::Bigint_estimateQuotientDigit(Assembler* assembler,
749 Label* normal_ir_body) {
750 // Pseudo code:
751 // static int _estQuotientDigit(Uint32List args, Uint32List digits, int i) {
752 // uint32_t yt = args[_YT]; // _YT == 1.
753 // uint32_t* dp = &digits[i >> 1]; // i is Smi.
754 // uint32_t dh = dp[0]; // dh == digits[i >> 1].
755 // uint32_t qd;
756 // if (dh == yt) {
757 // qd = DIGIT_MASK;
758 // } else {
759 // dl = dp[-1]; // dl == digits[(i - 1) >> 1].
760 // qd = dh:dl / yt; // No overflow possible, because dh < yt.
761 // }
762 // args[_QD] = qd; // _QD == 2.
763 // return 1;
764 // }
765
766 // EDI = args
767 __ movl(EDI, Address(ESP, 3 * target::kWordSize)); // args
768
769 // ECX = yt = args[1]
770 __ movl(ECX, FieldAddress(EDI, target::TypedData::payload_offset() +
771 kBytesPerBigIntDigit));
772
773 // EBX = dp = &digits[i >> 1]
774 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // digits
775 __ movl(EAX, Address(ESP, 1 * target::kWordSize)); // i is Smi
776 __ leal(EBX,
777 FieldAddress(EBX, EAX, TIMES_2, target::TypedData::payload_offset()));
778
779 // EDX = dh = dp[0]
780 __ movl(EDX, Address(EBX, 0));
781
782 // EAX = qd = DIGIT_MASK = -1
783 __ movl(EAX, Immediate(-1));
784
785 // Return qd if dh == yt
786 Label return_qd;
787 __ cmpl(EDX, ECX);
788 __ j(EQUAL, &return_qd, Assembler::kNearJump);
789
790 // EAX = dl = dp[-1]
791 __ movl(EAX, Address(EBX, -kBytesPerBigIntDigit));
792
793 // EAX = qd = dh:dl / yt = EDX:EAX / ECX
794 __ divl(ECX);
795
796 __ Bind(&return_qd);
797 // args[2] = qd
798 __ movl(FieldAddress(EDI, target::TypedData::payload_offset() +
799 2 * kBytesPerBigIntDigit),
800 EAX);
801
802 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
803 __ ret();
804}
805
806void AsmIntrinsifier::Montgomery_mulMod(Assembler* assembler,
807 Label* normal_ir_body) {
808 // Pseudo code:
809 // static int _mulMod(Uint32List args, Uint32List digits, int i) {
810 // uint32_t rho = args[_RHO]; // _RHO == 2.
811 // uint32_t d = digits[i >> 1]; // i is Smi.
812 // uint64_t t = rho*d;
813 // args[_MU] = t mod DIGIT_BASE; // _MU == 4.
814 // return 1;
815 // }
816
817 // EDI = args
818 __ movl(EDI, Address(ESP, 3 * target::kWordSize)); // args
819
820 // ECX = rho = args[2]
821 __ movl(ECX, FieldAddress(EDI, target::TypedData::payload_offset() +
822 2 * kBytesPerBigIntDigit));
823
824 // EAX = digits[i >> 1]
825 __ movl(EBX, Address(ESP, 2 * target::kWordSize)); // digits
826 __ movl(EAX, Address(ESP, 1 * target::kWordSize)); // i is Smi
827 __ movl(EAX,
828 FieldAddress(EBX, EAX, TIMES_2, target::TypedData::payload_offset()));
829
830 // EDX:EAX = t = rho*d
831 __ mull(ECX);
832
833 // args[4] = t mod DIGIT_BASE = low32(t)
834 __ movl(FieldAddress(EDI, target::TypedData::payload_offset() +
835 4 * kBytesPerBigIntDigit),
836 EAX);
837
838 __ movl(EAX, Immediate(target::ToRawSmi(1))); // One digit processed.
839 __ ret();
840}
841
842// Check if the last argument is a double, jump to label 'is_smi' if smi
843// (easy to convert to double), otherwise jump to label 'not_double_smi'.
844// Returns the last argument in EAX.
845static void TestLastArgumentIsDouble(Assembler* assembler,
846 Label* is_smi,
847 Label* not_double_smi) {
848 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
849 __ testl(EAX, Immediate(kSmiTagMask));
850 __ j(ZERO, is_smi, Assembler::kNearJump); // Jump if Smi.
851 __ CompareClassId(EAX, kDoubleCid, EBX);
852 __ j(NOT_EQUAL, not_double_smi, Assembler::kNearJump);
853 // Fall through if double.
854}
855
856// Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown
857// type. Return true or false object in the register EAX. Any NaN argument
858// returns false. Any non-double arg1 causes control flow to fall through to the
859// slow case (compiled method body).
860static void CompareDoubles(Assembler* assembler,
861 Label* normal_ir_body,
862 Condition true_condition) {
863 Label is_false, is_true, is_smi, double_op;
864 TestLastArgumentIsDouble(assembler, &is_smi, normal_ir_body);
865 // Both arguments are double, right operand is in EAX.
866 __ movsd(XMM1, FieldAddress(EAX, target::Double::value_offset()));
867 __ Bind(&double_op);
868 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // Left argument.
869 __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
870 __ comisd(XMM0, XMM1);
871 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false;
872 __ j(true_condition, &is_true, Assembler::kNearJump);
873 // Fall through false.
874 __ Bind(&is_false);
875 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
876 __ ret();
877 __ Bind(&is_true);
878 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
879 __ ret();
880 __ Bind(&is_smi);
881 __ SmiUntag(EAX);
882 __ cvtsi2sd(XMM1, EAX);
883 __ jmp(&double_op);
884 __ Bind(normal_ir_body);
885}
886
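// Note: comisd reports an unordered result (parity flag set) when either
// operand is NaN, so the PARITY_EVEN branch in CompareDoubles routes any NaN
// comparison to the false case before the requested condition is tested.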
887// arg0 is Double, arg1 is unknown.
888void AsmIntrinsifier::Double_greaterThan(Assembler* assembler,
889 Label* normal_ir_body) {
890 CompareDoubles(assembler, normal_ir_body, ABOVE);
891}
892
893// arg0 is Double, arg1 is unknown.
894void AsmIntrinsifier::Double_greaterEqualThan(Assembler* assembler,
895 Label* normal_ir_body) {
896 CompareDoubles(assembler, normal_ir_body, ABOVE_EQUAL);
897}
898
899// arg0 is Double, arg1 is unknown.
900void AsmIntrinsifier::Double_lessThan(Assembler* assembler,
901 Label* normal_ir_body) {
902 CompareDoubles(assembler, normal_ir_body, BELOW);
903}
904
905// arg0 is Double, arg1 is unknown.
906void AsmIntrinsifier::Double_equal(Assembler* assembler,
907 Label* normal_ir_body) {
908 CompareDoubles(assembler, normal_ir_body, EQUAL);
909}
910
911// arg0 is Double, arg1 is unknown.
912void AsmIntrinsifier::Double_lessEqualThan(Assembler* assembler,
913 Label* normal_ir_body) {
914 CompareDoubles(assembler, normal_ir_body, BELOW_EQUAL);
915}
916
917// Expects left argument to be double (receiver). Right argument is unknown.
918// Both arguments are on stack.
919static void DoubleArithmeticOperations(Assembler* assembler,
920 Label* normal_ir_body,
921 Token::Kind kind) {
922 Label is_smi, double_op;
923 TestLastArgumentIsDouble(assembler, &is_smi, normal_ir_body);
924 // Both arguments are double, right operand is in EAX.
925 __ movsd(XMM1, FieldAddress(EAX, target::Double::value_offset()));
926 __ Bind(&double_op);
927 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // Left argument.
928 __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
929 switch (kind) {
930 case Token::kADD:
931 __ addsd(XMM0, XMM1);
932 break;
933 case Token::kSUB:
934 __ subsd(XMM0, XMM1);
935 break;
936 case Token::kMUL:
937 __ mulsd(XMM0, XMM1);
938 break;
939 case Token::kDIV:
940 __ divsd(XMM0, XMM1);
941 break;
942 default:
943 UNREACHABLE();
944 }
945 const Class& double_class = DoubleClass();
946 __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
947 EAX, // Result register.
948 EBX);
949 __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
950 __ ret();
951 __ Bind(&is_smi);
952 __ SmiUntag(EAX);
953 __ cvtsi2sd(XMM1, EAX);
954 __ jmp(&double_op);
955 __ Bind(normal_ir_body);
956}
957
958void AsmIntrinsifier::Double_add(Assembler* assembler, Label* normal_ir_body) {
959 DoubleArithmeticOperations(assembler, normal_ir_body, Token::kADD);
960}
961
962void AsmIntrinsifier::Double_mul(Assembler* assembler, Label* normal_ir_body) {
963 DoubleArithmeticOperations(assembler, normal_ir_body, Token::kMUL);
964}
965
966void AsmIntrinsifier::Double_sub(Assembler* assembler, Label* normal_ir_body) {
967 DoubleArithmeticOperations(assembler, normal_ir_body, Token::kSUB);
968}
969
970void AsmIntrinsifier::Double_div(Assembler* assembler, Label* normal_ir_body) {
971 DoubleArithmeticOperations(assembler, normal_ir_body, Token::kDIV);
972}
973
974// Left is double, right is integer (Mint or Smi)
975void AsmIntrinsifier::Double_mulFromInteger(Assembler* assembler,
976 Label* normal_ir_body) {
977 // Only smis allowed.
978 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
979 __ testl(EAX, Immediate(kSmiTagMask));
980 __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump);
981 // Is Smi.
982 __ SmiUntag(EAX);
983 __ cvtsi2sd(XMM1, EAX);
984 __ movl(EAX, Address(ESP, +2 * target::kWordSize));
985 __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
986 __ mulsd(XMM0, XMM1);
987 const Class& double_class = DoubleClass();
988 __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
989 EAX, // Result register.
990 EBX);
991 __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
992 __ ret();
993 __ Bind(normal_ir_body);
994}
995
996void AsmIntrinsifier::DoubleFromInteger(Assembler* assembler,
997 Label* normal_ir_body) {
998 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
999 __ testl(EAX, Immediate(kSmiTagMask));
1000 __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump);
1001 // Is Smi.
1002 __ SmiUntag(EAX);
1003 __ cvtsi2sd(XMM0, EAX);
1004 const Class& double_class = DoubleClass();
1005 __ TryAllocate(double_class, normal_ir_body, Assembler::kNearJump,
1006 EAX, // Result register.
1007 EBX);
1008 __ movsd(FieldAddress(EAX, target::Double::value_offset()), XMM0);
1009 __ ret();
1010 __ Bind(normal_ir_body);
1011}
1012
1013void AsmIntrinsifier::Double_getIsNaN(Assembler* assembler,
1014 Label* normal_ir_body) {
1015 Label is_true;
1016 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1017 __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
1018 __ comisd(XMM0, XMM0);
1019 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true;
1020 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1021 __ ret();
1022 __ Bind(&is_true);
1023 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1024 __ ret();
1025}
1026
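// Note: comparing a double against itself with comisd is unordered exactly
// when the value is NaN, so the parity flag alone answers isNaN.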
1027void AsmIntrinsifier::Double_getIsInfinite(Assembler* assembler,
1028 Label* normal_ir_body) {
1029 Label not_inf;
1030 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1031 __ movl(EBX, FieldAddress(EAX, target::Double::value_offset()));
1032
1033 // If the low word isn't zero, then it isn't infinity.
1034 __ cmpl(EBX, Immediate(0));
1035 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
1036 // Check the high word.
1037 __ movl(EBX, FieldAddress(
1038 EAX, target::Double::value_offset() + target::kWordSize));
1039 // Mask off sign bit.
1040 __ andl(EBX, Immediate(0x7FFFFFFF));
1041 // Compare with +infinity.
1042 __ cmpl(EBX, Immediate(0x7FF00000));
1043 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
1044 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1045 __ ret();
1046
1047 __ Bind(&not_inf);
1048 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1049 __ ret();
1050}
1051
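// Note: this is the IEEE-754 bit pattern test for infinity: the low 32 bits
// of the mantissa must be zero and the high word, with the sign bit masked
// off, must be exactly 0x7FF00000 (all-ones exponent, zero mantissa).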
1052void AsmIntrinsifier::Double_getIsNegative(Assembler* assembler,
1053 Label* normal_ir_body) {
1054 Label is_false, is_true, is_zero;
1055 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1056 __ movsd(XMM0, FieldAddress(EAX, target::Double::value_offset()));
1057 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1.
1058 __ comisd(XMM0, XMM1);
1059 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false.
1060 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero.
1061 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false.
1062 __ Bind(&is_true);
1063 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1064 __ ret();
1065 __ Bind(&is_false);
1066 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1067 __ ret();
1068 __ Bind(&is_zero);
1069 // Check for negative zero (get the sign bit).
1070 __ movmskpd(EAX, XMM0);
1071 __ testl(EAX, Immediate(1));
1072 __ j(NOT_ZERO, &is_true, Assembler::kNearJump);
1073 __ jmp(&is_false, Assembler::kNearJump);
1074}
1075
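// Note: the separate is_zero path is needed because comisd treats -0.0 and
// 0.0 as equal; movmskpd copies the sign bit of XMM0 into bit 0 of EAX so
// that negative zero is still reported as negative.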
1076// Identity comparison.
1077void AsmIntrinsifier::ObjectEquals(Assembler* assembler,
1078 Label* normal_ir_body) {
1079 Label is_true;
1080 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1081 __ cmpl(EAX, Address(ESP, +2 * target::kWordSize));
1082 __ j(EQUAL, &is_true, Assembler::kNearJump);
1083 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1084 __ ret();
1085 __ Bind(&is_true);
1086 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1087 __ ret();
1088}
1089
1090static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) {
1091 assembler->RangeCheck(cid, kNoRegister, kSmiCid, kMintCid,
1092 Assembler::kIfInRange, target);
1093}
1094
1095static void JumpIfNotInteger(Assembler* assembler,
1096 Register cid,
1097 Label* target) {
1098 assembler->RangeCheck(cid, kNoRegister, kSmiCid, kMintCid,
1099 Assembler::kIfNotInRange, target);
1100}
1101
1102static void JumpIfString(Assembler* assembler, Register cid, Label* target) {
1103 assembler->RangeCheck(cid, kNoRegister, kOneByteStringCid, kTwoByteStringCid,
1104 Assembler::kIfInRange, target);
1105}
1106
1107static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) {
1108 assembler->RangeCheck(cid, kNoRegister, kOneByteStringCid, kTwoByteStringCid,
1109 Assembler::kIfNotInRange, target);
1110}
1111
1112static void JumpIfNotList(Assembler* assembler, Register cid, Label* target) {
1113 assembler->RangeCheck(cid, kNoRegister, kArrayCid, kGrowableObjectArrayCid,
1114 Assembler::kIfNotInRange, target);
1115}
1116
1117static void JumpIfType(Assembler* assembler, Register cid, Label* target) {
1118 COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
1119 (kRecordTypeCid == kTypeCid + 2));
1120 assembler->RangeCheck(cid, kNoRegister, kTypeCid, kRecordTypeCid,
1121 Assembler::kIfInRange, target);
1122}
1123
1124static void JumpIfNotType(Assembler* assembler, Register cid, Label* target) {
1125 COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
1126 (kRecordTypeCid == kTypeCid + 2));
1127 assembler->RangeCheck(cid, kNoRegister, kTypeCid, kRecordTypeCid,
1128 Assembler::kIfNotInRange, target);
1129}
1130
1131// Return type quickly for simple types (not parameterized and not signature).
1132void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
1133 Label* normal_ir_body) {
1134 Label use_declaration_type, not_double, not_integer, not_string;
1135 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1136 __ LoadClassIdMayBeSmi(EDI, EAX);
1137
1138 __ cmpl(EDI, Immediate(kClosureCid));
1139 __ j(EQUAL, normal_ir_body); // Instance is a closure.
1140
1141 __ cmpl(EDI, Immediate(kRecordCid));
1142 __ j(EQUAL, normal_ir_body); // Instance is a record.
1143
1144 __ cmpl(EDI, Immediate(kNumPredefinedCids));
1145 __ j(ABOVE, &use_declaration_type);
1146
1147 // If object is an instance of _Double, return double type.
1148 __ cmpl(EDI, Immediate(kDoubleCid));
1149 __ j(NOT_EQUAL, &not_double);
1150
1151 __ LoadIsolateGroup(EAX);
1152 __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
1153 __ movl(EAX, Address(EAX, target::ObjectStore::double_type_offset()));
1154 __ ret();
1155
1156 __ Bind(&not_double);
1157 // If object is an integer (smi, mint or bigint) return int type.
1158 __ movl(EAX, EDI);
1159 JumpIfNotInteger(assembler, EAX, &not_integer);
1160
1161 __ LoadIsolateGroup(EAX);
1162 __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
1163 __ movl(EAX, Address(EAX, target::ObjectStore::int_type_offset()));
1164 __ ret();
1165
1166 __ Bind(&not_integer);
1167 // If object is a string (one byte, two byte or external variants) return
1168 // string type.
1169 __ movl(EAX, EDI);
1170 JumpIfNotString(assembler, EAX, &not_string);
1171
1172 __ LoadIsolateGroup(EAX);
1173 __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
1174 __ movl(EAX, Address(EAX, target::ObjectStore::string_type_offset()));
1175 __ ret();
1176
1177 __ Bind(&not_string);
1178 // If object is a type or function type, return Dart type.
1179 __ movl(EAX, EDI);
1180 JumpIfNotType(assembler, EAX, &use_declaration_type);
1181
1182 __ LoadIsolateGroup(EAX);
1183 __ movl(EAX, Address(EAX, target::IsolateGroup::object_store_offset()));
1184 __ movl(EAX, Address(EAX, target::ObjectStore::type_type_offset()));
1185 __ ret();
1186
1187 // Object is neither double, nor integer, nor string, nor type.
1188 __ Bind(&use_declaration_type);
1189 __ LoadClassById(EBX, EDI);
1190 __ movzxw(EDI, FieldAddress(EBX, target::Class::num_type_arguments_offset()));
1191 __ cmpl(EDI, Immediate(0));
1192 __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
1193 __ movl(EAX, FieldAddress(EBX, target::Class::declaration_type_offset()));
1194 __ CompareObject(EAX, NullObject());
1195 __ j(EQUAL, normal_ir_body, Assembler::kNearJump); // Not yet set.
1196 __ ret();
1197
1198 __ Bind(normal_ir_body);
1199}
1200
1201// Compares cid1 and cid2 to see if they're syntactically equivalent. If this
1202// can be determined by this fast path, it jumps to either equal_* or not_equal.
1203// If classes are equivalent but may be generic, then jumps to
1204// equal_may_be_generic. Clobbers scratch.
1205static void EquivalentClassIds(Assembler* assembler,
1206 Label* normal_ir_body,
1207 Label* equal_may_be_generic,
1208 Label* equal_not_generic,
1209 Label* not_equal,
1210 Register cid1,
1211 Register cid2,
1212 Register scratch,
1213 bool testing_instance_cids) {
1214 Label not_integer, not_integer_or_string, not_integer_or_string_or_list;
1215
1216 // Check if left hand side is a closure. Closures are handled in the runtime.
1217 __ cmpl(cid1, Immediate(kClosureCid));
1218 __ j(EQUAL, normal_ir_body);
1219
1220 // Check if left hand side is a record. Records are handled in the runtime.
1221 __ cmpl(cid1, Immediate(kRecordCid));
1222 __ j(EQUAL, normal_ir_body);
1223
1224// Check whether class ids match. If class ids don't match, types may still be
1225 // considered equivalent (e.g. multiple string implementation classes map to a
1226 // single String type).
1227 __ cmpl(cid1, cid2);
1228 __ j(EQUAL, equal_may_be_generic);
1229
1230 // Class ids are different. Check if we are comparing two string types (with
1231 // different representations), two integer types, two list types or two type
1232 // types.
1233 __ cmpl(cid1, Immediate(kNumPredefinedCids));
1234 __ j(ABOVE_EQUAL, not_equal);
1235
1236 // Check if both are integer types.
1237 __ movl(scratch, cid1);
1238 JumpIfNotInteger(assembler, scratch, &not_integer);
1239
1240 // First type is an integer. Check if the second is an integer too.
1241 __ movl(scratch, cid2);
1242 JumpIfInteger(assembler, scratch, equal_not_generic);
1243 // Integer types are only equivalent to other integer types.
1244 __ jmp(not_equal);
1245
1246 __ Bind(&not_integer);
1247 // Check if both are String types.
1248 __ movl(scratch, cid1);
1249 JumpIfNotString(assembler, scratch,
1250 testing_instance_cids ? &not_integer_or_string : not_equal);
1251
1252 // First type is a String. Check if the second is a String too.
1253 __ movl(scratch, cid2);
1254 JumpIfString(assembler, scratch, equal_not_generic);
1255 // String types are only equivalent to other String types.
1256 __ jmp(not_equal);
1257
1258 if (testing_instance_cids) {
1259 __ Bind(&not_integer_or_string);
1260 // Check if both are List types.
1261 __ movl(scratch, cid1);
1262 JumpIfNotList(assembler, scratch, &not_integer_or_string_or_list);
1263
1264 // First type is a List. Check if the second is a List too.
1265 __ movl(scratch, cid2);
1266 JumpIfNotList(assembler, scratch, not_equal);
1267 ASSERT(compiler::target::Array::type_arguments_offset() ==
1268 compiler::target::GrowableObjectArray::type_arguments_offset());
1269 __ jmp(equal_may_be_generic);
1270
1271 __ Bind(&not_integer_or_string_or_list);
1272 // Check if the first type is a Type. If it is not then types are not
1273 // equivalent because they have different class ids and they are not String
1274 // or integer or List or Type.
1275 __ movl(scratch, cid1);
1276 JumpIfNotType(assembler, scratch, not_equal);
1277
1278 // First type is a Type. Check if the second is a Type too.
1279 __ movl(scratch, cid2);
1280 JumpIfType(assembler, scratch, equal_not_generic);
1281 // Type types are only equivalent to other Type types.
1282 __ jmp(not_equal);
1283 }
1284}
1285
1286void AsmIntrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler,
1287 Label* normal_ir_body) {
1288 __ movl(EAX, Address(ESP, +1 * target::kWordSize));
1289 __ LoadClassIdMayBeSmi(EDI, EAX);
1290
1291 __ movl(EAX, Address(ESP, +2 * target::kWordSize));
1292 __ LoadClassIdMayBeSmi(EBX, EAX);
1293
1294 Label equal_may_be_generic, equal, not_equal;
1295 EquivalentClassIds(assembler, normal_ir_body, &equal_may_be_generic, &equal,
1296 &not_equal, EDI, EBX, EAX,
1297 /* testing_instance_cids = */ true);
1298
1299 __ Bind(&equal_may_be_generic);
1300 // Classes are equivalent and neither is a closure class.
1301 // Check if there are no type arguments. In this case we can return true.
1302 // Otherwise fall through into the runtime to handle comparison.
1303 __ LoadClassById(EAX, EDI);
1304 __ movl(
1305 EAX,
1306 FieldAddress(
1307 EAX,
1308 target::Class::host_type_arguments_field_offset_in_words_offset()));
1309 __ cmpl(EAX, Immediate(target::Class::kNoTypeArguments));
1310 __ j(EQUAL, &equal);
1311
1312 // Compare type arguments, host_type_arguments_field_offset_in_words in EAX.
1313 __ movl(EDI, Address(ESP, +1 * target::kWordSize));
1314 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
1315 __ movl(EDI, FieldAddress(EDI, EAX, TIMES_4, 0));
1316 __ movl(EBX, FieldAddress(EBX, EAX, TIMES_4, 0));
1317 __ cmpl(EDI, EBX);
1318 __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
1319 // Fall through to equal case if type arguments are equal.
1320
1321 __ Bind(&equal);
1322 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1323 __ ret();
1324
1325 __ Bind(&not_equal);
1326 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1327 __ ret();
1328
1329 __ Bind(normal_ir_body);
1330}
1331
1332void AsmIntrinsifier::String_getHashCode(Assembler* assembler,
1333 Label* normal_ir_body) {
1334 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // String object.
1335 __ movl(EAX, FieldAddress(EAX, target::String::hash_offset()));
1336 __ cmpl(EAX, Immediate(0));
1337 __ j(EQUAL, normal_ir_body, Assembler::kNearJump);
1338 __ ret();
1339 __ Bind(normal_ir_body);
1340 // Hash not yet computed.
1341}
1342
1343void AsmIntrinsifier::Type_equality(Assembler* assembler,
1344 Label* normal_ir_body) {
1345 Label equal, not_equal, equiv_cids_may_be_generic, equiv_cids;
1346
1347 __ movl(EDI, Address(ESP, +1 * target::kWordSize));
1348 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
1349 __ cmpl(EDI, EBX);
1350 __ j(EQUAL, &equal);
1351
1352 // EDI might not be a Type object, so check that first (EBX should be though,
1353 // since this is a method on the Type class).
1354 __ LoadClassIdMayBeSmi(EAX, EDI);
1355 __ cmpl(EAX, Immediate(kTypeCid));
1356 __ j(NOT_EQUAL, normal_ir_body);
1357
1358 // Check if types are syntactically equal.
1359 __ LoadTypeClassId(ECX, EDI);
1360 __ LoadTypeClassId(EDX, EBX);
1361 // We are not testing instance cids, but type class cids of Type instances.
1362 EquivalentClassIds(assembler, normal_ir_body, &equiv_cids_may_be_generic,
1363 &equiv_cids, &not_equal, ECX, EDX, EAX,
1364 /* testing_instance_cids = */ false);
1365
1366 __ Bind(&equiv_cids_may_be_generic);
1367 // Compare type arguments in Type instances.
1368 __ movl(ECX, FieldAddress(EDI, target::Type::arguments_offset()));
1369 __ movl(EDX, FieldAddress(EBX, target::Type::arguments_offset()));
1370 __ cmpl(ECX, EDX);
1371 __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
1372 // Fall through to check nullability if type arguments are equal.
1373
1374 // Check nullability.
1375 __ Bind(&equiv_cids);
1376 __ LoadAbstractTypeNullability(EDI, EDI);
1377 __ LoadAbstractTypeNullability(EBX, EBX);
1378 __ cmpl(EDI, EBX);
1379 __ j(NOT_EQUAL, &not_equal, Assembler::kNearJump);
1380 // Fall through to equal case if nullability is strictly equal.
1381
1382 __ Bind(&equal);
1383 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1384 __ ret();
1385
1386 __ Bind(&not_equal);
1387 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1388 __ ret();
1389
1390 __ Bind(normal_ir_body);
1391}
1392
1393void AsmIntrinsifier::AbstractType_getHashCode(Assembler* assembler,
1394 Label* normal_ir_body) {
1395 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // AbstractType object.
1396 __ movl(EAX, FieldAddress(EAX, target::AbstractType::hash_offset()));
1397 __ testl(EAX, EAX);
1398 __ j(EQUAL, normal_ir_body, Assembler::kNearJump);
1399 __ ret();
1400 __ Bind(normal_ir_body);
1401 // Hash not yet computed.
1402}
1403
1404void AsmIntrinsifier::AbstractType_equality(Assembler* assembler,
1405 Label* normal_ir_body) {
1406 __ movl(EDI, Address(ESP, +1 * target::kWordSize));
1407 __ movl(EBX, Address(ESP, +2 * target::kWordSize));
1408 __ cmpl(EDI, EBX);
1409 __ j(NOT_EQUAL, normal_ir_body);
1410
1411 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1412 __ ret();
1413
1414 __ Bind(normal_ir_body);
1415}
1416
1417// bool _substringMatches(int start, String other)
1418void AsmIntrinsifier::StringBaseSubstringMatches(Assembler* assembler,
1419 Label* normal_ir_body) {
1420 // For precompilation, not implemented on IA32.
1421}
1422
1423void AsmIntrinsifier::Object_getHash(Assembler* assembler,
1424 Label* normal_ir_body) {
1425 UNREACHABLE();
1426}
1427
1428void AsmIntrinsifier::StringBaseCharAt(Assembler* assembler,
1429 Label* normal_ir_body) {
1430 Label try_two_byte_string;
1431 __ movl(EBX, Address(ESP, +1 * target::kWordSize)); // Index.
1432 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // String.
1433 __ testl(EBX, Immediate(kSmiTagMask));
1434 __ j(NOT_ZERO, normal_ir_body, Assembler::kNearJump); // Non-smi index.
1435 // Range check.
1436 __ cmpl(EBX, FieldAddress(EAX, target::String::length_offset()));
1437 // Runtime throws exception.
1438 __ j(ABOVE_EQUAL, normal_ir_body, Assembler::kNearJump);
1439 __ CompareClassId(EAX, kOneByteStringCid, EDI);
1440 __ j(NOT_EQUAL, &try_two_byte_string, Assembler::kNearJump);
1441 __ SmiUntag(EBX);
1442 __ movzxb(EBX, FieldAddress(EAX, EBX, TIMES_1,
1443 target::OneByteString::data_offset()));
1444 __ cmpl(EBX, Immediate(target::Symbols::kNumberOfOneCharCodeSymbols));
1445 __ j(GREATER_EQUAL, normal_ir_body);
1446 __ movl(EAX, Immediate(SymbolsPredefinedAddress()));
1447 __ movl(EAX, Address(EAX, EBX, TIMES_4,
1448 target::Symbols::kNullCharCodeSymbolOffset *
1449 target::kWordSize));
1450 __ ret();
1451
1452 __ Bind(&try_two_byte_string);
1453 __ CompareClassId(EAX, kTwoByteStringCid, EDI);
1454 __ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
1455 ASSERT(kSmiTagShift == 1);
1456 __ movzxw(EBX, FieldAddress(EAX, EBX, TIMES_1,
1457 target::TwoByteString::data_offset()));
1458 __ cmpl(EBX, Immediate(target::Symbols::kNumberOfOneCharCodeSymbols));
1459 __ j(GREATER_EQUAL, normal_ir_body);
1460 __ movl(EAX, Immediate(SymbolsPredefinedAddress()));
1461 __ movl(EAX, Address(EAX, EBX, TIMES_4,
1462 target::Symbols::kNullCharCodeSymbolOffset *
1463 target::kWordSize));
1464 __ ret();
1465
1466 __ Bind(normal_ir_body);
1467}
1468
1469void AsmIntrinsifier::StringBaseIsEmpty(Assembler* assembler,
1470 Label* normal_ir_body) {
1471 Label is_true;
1472 // Get length.
1473 __ movl(EAX, Address(ESP, +1 * target::kWordSize)); // String object.
1474 __ movl(EAX, FieldAddress(EAX, target::String::length_offset()));
1475 __ cmpl(EAX, Immediate(target::ToRawSmi(0)));
1476 __ j(EQUAL, &is_true, Assembler::kNearJump);
1477 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1478 __ ret();
1479 __ Bind(&is_true);
1480 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1481 __ ret();
1482}
1483
1484void AsmIntrinsifier::OneByteString_getHashCode(Assembler* assembler,
1485 Label* normal_ir_body) {
1486 Label compute_hash;
1487 __ movl(EBX, Address(ESP, +1 * target::kWordSize)); // OneByteString object.
1488 __ movl(EAX, FieldAddress(EBX, target::String::hash_offset()));
1489 __ cmpl(EAX, Immediate(0));
1490 __ j(EQUAL, &compute_hash, Assembler::kNearJump);
1491 __ ret();
1492
1493 __ Bind(&compute_hash);
1494 // Hash not yet computed, use algorithm of class StringHasher.
1495 __ movl(ECX, FieldAddress(EBX, target::String::length_offset()));
1496 __ SmiUntag(ECX);
1497 __ xorl(EAX, EAX);
1498 __ xorl(EDI, EDI);
1499 // EBX: Instance of OneByteString.
1500 // ECX: String length, untagged integer.
1501 // EDI: Loop counter, untagged integer.
1502 // EAX: Hash code, untagged integer.
1503 Label loop, done;
1504 __ Bind(&loop);
1505 __ cmpl(EDI, ECX);
1506 __ j(EQUAL, &done, Assembler::kNearJump);
1507 // Add to hash code: (hash_ is uint32)
1508 // Get one character (ch).
1509 __ movzxb(EDX, FieldAddress(EBX, EDI, TIMES_1,
1510 target::OneByteString::data_offset()));
1511 // EDX: ch and temporary.
1512 __ CombineHashes(EAX, EDX);
1513
1514 __ incl(EDI);
1515 __ jmp(&loop, Assembler::kNearJump);
1516
1517 __ Bind(&done);
1518 // Finalize and fit to size kHashBits. Ensures hash is non-zero.
1519 __ FinalizeHashForSize(target::String::kHashBits, EAX, EDX);
1520 __ SmiTag(EAX);
1521 __ StoreIntoSmiField(FieldAddress(EBX, target::String::hash_offset()), EAX);
1522 __ ret();
1523}
1524
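// Note: the loop above mixes each character into the running hash in EAX, and
// FinalizeHashForSize truncates it to String::kHashBits while ensuring the
// stored value is non-zero, so a zero hash field can keep meaning "not yet
// computed".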
1525// Allocates a _OneByteString or _TwoByteString. The content is not initialized.
1526// 'length_reg' contains the desired length as a _Smi or _Mint.
1527// Returns new string as tagged pointer in EAX.
1528static void TryAllocateString(Assembler* assembler,
1529 classid_t cid,
1530 intptr_t max_elements,
1531 Label* ok,
1532 Label* failure,
1533 Register length_reg) {
1534 ASSERT(cid == kOneByteStringCid || cid == kTwoByteStringCid);
1535 // _Mint length: call to runtime to produce error.
1536 __ BranchIfNotSmi(length_reg, failure);
1537 // negative length: call to runtime to produce error.
1538 // Too big: call to runtime to allocate old.
1539 __ cmpl(length_reg, Immediate(target::ToRawSmi(max_elements)));
1540 __ j(ABOVE, failure);
1541
1542 NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, failure, EAX));
1543 if (length_reg != EDI) {
1544 __ movl(EDI, length_reg);
1545 }
1546 Label pop_and_fail;
1547 __ pushl(EDI); // Preserve length.
1548 if (cid == kOneByteStringCid) {
1549 __ SmiUntag(EDI);
1550 } else {
1551 // Untag length and multiply by element size -> no-op.
1552 }
1553 const intptr_t fixed_size_plus_alignment_padding =
1554 target::String::InstanceSize() +
1555 target::ObjectAlignment::kObjectAlignment - 1;
1556 __ leal(EDI, Address(EDI, TIMES_1,
1557 fixed_size_plus_alignment_padding)); // EDI is untagged.
1558 __ andl(EDI, Immediate(-target::ObjectAlignment::kObjectAlignment));
1559
1560 __ movl(EAX, Address(THR, target::Thread::top_offset()));
1561 __ movl(EBX, EAX);
1562
1563 // EDI: allocation size.
1564 __ addl(EBX, EDI);
1565 __ j(CARRY, &pop_and_fail);
1566
1567 // Check if the allocation fits into the remaining space.
1568 // EAX: potential new object start.
1569 // EBX: potential next object start.
1570 // EDI: allocation size.
1571 __ cmpl(EBX, Address(THR, target::Thread::end_offset()));
1572 __ j(ABOVE_EQUAL, &pop_and_fail);
1573 __ CheckAllocationCanary(EAX);
1574
1575 // Successfully allocated the object(s), now update top to point to
1576 // next object start and initialize the object.
1577 __ movl(Address(THR, target::Thread::top_offset()), EBX);
1578 __ addl(EAX, Immediate(kHeapObjectTag));
1579 // Clear last double word to ensure string comparison doesn't need to
1580 // specially handle remainder of strings with lengths not factors of double
1581 // offsets.
1582 ASSERT(target::kWordSize == 4);
1583 __ movl(Address(EBX, -1 * target::kWordSize), Immediate(0));
1584 __ movl(Address(EBX, -2 * target::kWordSize), Immediate(0));
1585 // Initialize the tags.
1586 // EAX: new object start as a tagged pointer.
1587 // EBX: new object end address.
1588 // EDI: allocation size.
1589 {
1590 Label size_tag_overflow, done;
1591 __ cmpl(EDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
1592 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
1593 __ shll(EDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
1594 target::ObjectAlignment::kObjectAlignmentLog2));
1595 __ jmp(&done, Assembler::kNearJump);
1596
1597 __ Bind(&size_tag_overflow);
1598 __ xorl(EDI, EDI);
1599 __ Bind(&done);
1600
1601 // Get the class index and insert it into the tags.
1602 const uword tags =
1603 target::MakeTagWordForNewSpaceObject(cid, /*instance_size=*/0);
1604 __ orl(EDI, Immediate(tags));
1605 __ movl(FieldAddress(EAX, target::Object::tags_offset()), EDI); // Tags.
1606 }
1607
1608 // Set the length field.
1609 __ popl(EDI);
1610 __ StoreIntoObjectNoBarrier(
1611 EAX, FieldAddress(EAX, target::String::length_offset()), EDI);
1612 // Clear hash.
1613 __ ZeroInitSmiField(FieldAddress(EAX, target::String::hash_offset()));
1615
1616 __ Bind(&pop_and_fail);
1617 __ popl(EDI);
1618 __ jmp(failure);
1619}
1620
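// Note: TryAllocateString is a new-space bump allocation: the element count
// is converted to a size rounded up to the object alignment, added to
// Thread::top and checked against Thread::end; on success the header tags
// (including the encoded size) are written, then the length and a cleared
// hash. Every failure path branches to the runtime instead.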
1621// Arg0: OneByteString (receiver)
1622// Arg1: Start index as Smi.
1623// Arg2: End index as Smi.
1624// The indexes must be valid.
1625void AsmIntrinsifier::OneByteString_substringUnchecked(Assembler* assembler,
1626 Label* normal_ir_body) {
1627 const intptr_t kStringOffset = 3 * target::kWordSize;
1628 const intptr_t kStartIndexOffset = 2 * target::kWordSize;
1629 const intptr_t kEndIndexOffset = 1 * target::kWordSize;
1630 Label ok;
1631 __ movl(EAX, Address(ESP, +kStartIndexOffset));
1632 __ movl(EDI, Address(ESP, +kEndIndexOffset));
1633 __ orl(EAX, EDI);
1634 __ testl(EAX, Immediate(kSmiTagMask));
1635 __ j(NOT_ZERO, normal_ir_body); // 'start', 'end' not Smi.
1636
1637 __ subl(EDI, Address(ESP, +kStartIndexOffset));
1638 TryAllocateString(assembler, kOneByteStringCid,
1639 target::OneByteString::kMaxNewSpaceElements, &ok,
1640 normal_ir_body, EDI);
1641 __ Bind(&ok);
1642 // EAX: new string as tagged pointer.
1643 // Copy string.
1644 __ movl(EDI, Address(ESP, +kStringOffset));
1645 __ movl(EBX, Address(ESP, +kStartIndexOffset));
1646 __ SmiUntag(EBX);
1647 __ leal(EDI, FieldAddress(EDI, EBX, TIMES_1,
1648 target::OneByteString::data_offset()));
1649 // EDI: Start address to copy from (untagged).
1650 // EBX: Untagged start index.
1651 __ movl(ECX, Address(ESP, +kEndIndexOffset));
1652 __ SmiUntag(ECX);
1653 __ subl(ECX, EBX);
1654 __ xorl(EDX, EDX);
1655 // EDI: Start address to copy from (untagged).
1656 // ECX: Untagged number of bytes to copy.
1657 // EAX: Tagged result string.
1658 // EDX: Loop counter.
1659 // EBX: Scratch register.
1660 Label loop, check;
1661 __ jmp(&check, Assembler::kNearJump);
1662 __ Bind(&loop);
1663 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0));
1664 __ movb(FieldAddress(EAX, EDX, TIMES_1, target::OneByteString::data_offset()),
1665 BL);
1666 __ incl(EDX);
1667 __ Bind(&check);
1668 __ cmpl(EDX, ECX);
1669 __ j(LESS, &loop, Assembler::kNearJump);
1670 __ ret();
1671 __ Bind(normal_ir_body);
1672}
1673
1674void AsmIntrinsifier::WriteIntoOneByteString(Assembler* assembler,
1675 Label* normal_ir_body) {
1676 __ movl(ECX, Address(ESP, +1 * target::kWordSize)); // Value.
1677 __ movl(EBX, Address(ESP, +2 * target::kWordSize)); // Index.
1678 __ movl(EAX, Address(ESP, +3 * target::kWordSize)); // OneByteString.
1679 __ SmiUntag(EBX);
1680 __ SmiUntag(ECX);
1681 __ movb(FieldAddress(EAX, EBX, TIMES_1, target::OneByteString::data_offset()),
1682 CL);
1683 __ ret();
1684}
1685
1686void AsmIntrinsifier::WriteIntoTwoByteString(Assembler* assembler,
1687 Label* normal_ir_body) {
1688 __ movl(ECX, Address(ESP, +1 * target::kWordSize)); // Value.
1689 __ movl(EBX, Address(ESP, +2 * target::kWordSize)); // Index.
1690 __ movl(EAX, Address(ESP, +3 * target::kWordSize)); // TwoByteString.
1691 // Untagging the index (>> 1) and scaling by the 2-byte element size (<< 1) cancel out, so the tagged Smi index is used directly as the byte offset.
1692 __ SmiUntag(ECX);
1693 __ movw(FieldAddress(EAX, EBX, TIMES_1, target::TwoByteString::data_offset()),
1694 ECX);
1695 __ ret();
1696}
1697
1698void AsmIntrinsifier::AllocateOneByteString(Assembler* assembler,
1699 Label* normal_ir_body) {
1700 __ movl(EDI, Address(ESP, +1 * target::kWordSize)); // Length.
1701 Label ok;
1702 TryAllocateString(assembler, kOneByteStringCid,
1703 target::OneByteString::kMaxNewSpaceElements, &ok,
1704 normal_ir_body, EDI);
1705 // On success, EAX contains the new string as a tagged pointer.
1706
1707 __ Bind(&ok);
1708 __ ret();
1709
1710 __ Bind(normal_ir_body);
1711}
1712
1713void AsmIntrinsifier::AllocateTwoByteString(Assembler* assembler,
1714 Label* normal_ir_body) {
1715 __ movl(EDI, Address(ESP, +1 * target::kWordSize)); // Length.
1716 Label ok;
1717 TryAllocateString(assembler, kTwoByteStringCid,
1718 target::TwoByteString::kMaxNewSpaceElements, &ok,
1719 normal_ir_body, EDI);
1720 // On success, EAX contains the new string as a tagged pointer.
1721
1722 __ Bind(&ok);
1723 __ ret();
1724
1725 __ Bind(normal_ir_body);
1726}
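// AllocateOneByteString and AllocateTwoByteString are thin wrappers around
// TryAllocateString: they only attempt the new-space fast path and fall
// through to normal_ir_body otherwise, letting the regular allocation path
// handle large or invalid lengths and GC.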
1727
1728void AsmIntrinsifier::OneByteString_equality(Assembler* assembler,
1729 Label* normal_ir_body) {
1730 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // This.
1731 __ movl(EBX, Address(ESP, +1 * target::kWordSize)); // Other.
1732
1733 StringEquality(assembler, EAX, EBX, EDI, ECX, EAX, normal_ir_body,
1734 kOneByteStringCid);
1735}
1736
1737void AsmIntrinsifier::TwoByteString_equality(Assembler* assembler,
1738 Label* normal_ir_body) {
1739 __ movl(EAX, Address(ESP, +2 * target::kWordSize)); // This.
1740 __ movl(EBX, Address(ESP, +1 * target::kWordSize)); // Other.
1741
1742 StringEquality(assembler, EAX, EBX, EDI, ECX, EAX, normal_ir_body,
1743 kTwoByteStringCid);
1744}
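// Both equality intrinsics delegate to the shared StringEquality helper
// defined earlier in this file; only the class id differs, so the same
// comparison sequence is emitted for one- and two-byte strings.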
1745
1746void AsmIntrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler,
1747 Label* normal_ir_body,
1748 bool sticky) {
1749 if (FLAG_interpret_irregexp) return;
1750
1751 const intptr_t kRegExpParamOffset = 3 * target::kWordSize;
1752 const intptr_t kStringParamOffset = 2 * target::kWordSize;
1753 // start_index smi is located at offset 1.
1754
1755 // Incoming registers:
1756 // EAX: Function. (Will be loaded with the specialized matcher function.)
1757 // ECX: Unknown. (Must be GC safe on tail call.)
1758 // EDX: Arguments descriptor. (Will be preserved.)
1759
1760 // Load the specialized function pointer into EAX. Leverage the fact that the
1761 // string CIDs as well as the stored function pointers are in sequence.
1762 __ movl(EBX, Address(ESP, kRegExpParamOffset));
1763 __ movl(EDI, Address(ESP, kStringParamOffset));
1764 __ LoadClassId(EDI, EDI);
1765 __ SubImmediate(EDI, Immediate(kOneByteStringCid));
1766 __ movl(FUNCTION_REG, FieldAddress(EBX, EDI, TIMES_4,
1767 target::RegExp::function_offset(
1768 kOneByteStringCid, sticky)));
1769
1770 // Registers are now set up for the lazy compile stub. It expects the function
1771 // in EAX, the argument descriptor in EDX, and IC-Data in ECX.
1772 __ xorl(ECX, ECX);
1773
1774 // Tail-call the function.
1775 __ jmp(FieldAddress(FUNCTION_REG, target::Function::entry_point_offset()));
1776}
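// The lookup above relies on the specialized matcher functions being stored
// in the RegExp object in the same order as the string class ids, so
// (class_id(subject) - kOneByteStringCid), scaled by the word size (TIMES_4),
// selects the right field relative to function_offset(kOneByteStringCid,
// sticky). ECX is cleared to a Smi zero so it stays GC-safe across the tail
// call.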
1777
1778void AsmIntrinsifier::UserTag_defaultTag(Assembler* assembler,
1779 Label* normal_ir_body) {
1780 __ LoadIsolate(EAX);
1781 __ movl(EAX, Address(EAX, target::Isolate::default_tag_offset()));
1782 __ ret();
1783}
1784
1785void AsmIntrinsifier::Profiler_getCurrentTag(Assembler* assembler,
1786 Label* normal_ir_body) {
1787 __ LoadIsolate(EAX);
1788 __ movl(EAX, Address(EAX, target::Isolate::current_tag_offset()));
1789 __ ret();
1790}
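// UserTag_defaultTag and Profiler_getCurrentTag simply load one field off the
// current isolate and return it; there is no slow path, so normal_ir_body is
// never bound in either intrinsic.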
1791
1792void AsmIntrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler,
1793 Label* normal_ir_body) {
1794#if !defined(SUPPORT_TIMELINE)
1795 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1796 __ ret();
1797#else
1798 Label true_label;
1799 // Load TimelineStream*.
1800 __ movl(EAX, Address(THR, target::Thread::dart_stream_offset()));
1801 // Load uintptr_t from TimelineStream*.
1802 __ movl(EAX, Address(EAX, target::TimelineStream::enabled_offset()));
1803 __ cmpl(EAX, Immediate(0));
1804 __ j(NOT_ZERO, &true_label, Assembler::kNearJump);
1805 // Not enabled.
1806 __ LoadObject(EAX, CastHandle<Object>(FalseObject()));
1807 __ ret();
1808 // Enabled.
1809 __ Bind(&true_label);
1810 __ LoadObject(EAX, CastHandle<Object>(TrueObject()));
1811 __ ret();
1812#endif
1813}
1814
1815void AsmIntrinsifier::Timeline_getNextTaskId(Assembler* assembler,
1816 Label* normal_ir_body) {
1817#if !defined(SUPPORT_TIMELINE)
1818 __ LoadImmediate(EAX, target::ToRawSmi(0));
1819 __ ret();
1820#else
1821 __ movl(EBX, Address(THR, target::Thread::next_task_id_offset()));
1822 __ movl(ECX, Address(THR, target::Thread::next_task_id_offset() + 4));
1823 __ movl(EAX, EBX);
1824 __ SmiTag(EAX); // Ignore loss of precision.
1825 __ addl(EBX, Immediate(1));
1826 __ adcl(ECX, Immediate(0));
1827 __ movl(Address(THR, target::Thread::next_task_id_offset()), EBX);
1828 __ movl(Address(THR, target::Thread::next_task_id_offset() + 4), ECX);
1829 __ ret();
1830#endif
1831}
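// On IA32 the 64-bit task id is kept in two adjacent 32-bit words, updated as
// a pair with addl/adcl so the carry propagates. A rough C equivalent
// (illustrative only):
//
//   int64_t id = thread->next_task_id;
//   thread->next_task_id = id + 1;
//   return Smi(static_cast<int32_t>(id));  // high bits dropped, as noted above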
1832
1833#undef __
1834
1835} // namespace compiler
1836} // namespace dart
1837
1838#endif // defined(TARGET_ARCH_IA32)