assembler_ia32.cc
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // NOLINT
#if defined(TARGET_ARCH_IA32)

#define SHOULD_NOT_INCLUDE_RUNTIME

#include "vm/class_id.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/backend/locations.h"
#include "vm/cpu.h"
#include "vm/instructions.h"
#include "vm/tags.h"
namespace dart {

namespace compiler {

class DirectCallRelocation : public AssemblerFixup {
 public:
  void Process(const MemoryRegion& region, intptr_t position) {
    // Direct calls are relative to the following instruction on x86.
    int32_t pointer = region.Load<int32_t>(position);
    int32_t delta = region.start() + position + sizeof(int32_t);
    region.Store<int32_t>(position, pointer - delta);
  }

  virtual bool IsPointerOffset() const { return false; }
};
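
// Worked example (illustrative values): if EmitInt32 placed the absolute
// target 0x0804F000 at `position`, and region.start() + position + 4 (the
// address of the byte following the rel32 field, i.e. the next instruction)
// is 0x08041000, Process() stores 0x0804F000 - 0x08041000 = 0xE000: exactly
// the rel32 displacement that E8 (call rel32) adds to the next-instruction
// address at run time.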

int32_t Assembler::jit_cookie() {
  if (jit_cookie_ == 0) {
    jit_cookie_ = CreateJitCookie();
  }
  return jit_cookie_;
}

void Assembler::call(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);
}

void Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}

void Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  const int kSize = 5;
  EmitLabel(label, kSize);
}

void Assembler::call(const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  intptr_t call_start = buffer_.GetPosition();
  EmitUint8(0xE8);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
  ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
}

void Assembler::pushl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}

void Assembler::pushl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}

void Assembler::pushl(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}
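
// Note: opcode 0x6A (push imm8) sign-extends its operand to 32 bits, which is
// why the short form is gated on imm.is_int8() rather than is_uint8(). For
// example, pushl(Immediate(-1)) assembles to 6A FF and still pushes
// 0xFFFFFFFF.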

void Assembler::popl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}

void Assembler::popl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}

void Assembler::pushal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x60);
}

void Assembler::popal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x61);
}

void Assembler::setcc(Condition condition, ByteRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitUint8(0xC0 + dst);
}

void Assembler::movl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}

void Assembler::movl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}

void Assembler::movl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}

void Assembler::movl(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}

void Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}

void Assembler::movzxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}

void Assembler::movzxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}

void Assembler::movsxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}

void Assembler::movsxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}

void Assembler::movb(Register dst, const Address& src) {
  // This would leave 24 bits above the 1 byte value undefined.
  // If we ever want to purposefully have those undefined, remove this.
  // TODO(dartbug.com/40210): Allow this.
  FATAL("Use movzxb or movsxb instead.");
}

void Assembler::movb(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}

void Assembler::movb(const Address& dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}

void Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  EmitOperand(EAX, dst);
  ASSERT(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
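
// Note: in EmitOperand(EAX, dst) above, EAX does not name a data operand; its
// register number (0) supplies the /0 opcode-extension field of the ModRM
// byte required by the C6 (mov r/m8, imm8) encoding.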

void Assembler::movzxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}

void Assembler::movzxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}

void Assembler::movsxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}

void Assembler::movsxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}

void Assembler::movw(Register dst, const Address& src) {
  // This would leave 16 bits above the 2 byte value undefined.
  // If we ever want to purposefully have those undefined, remove this.
  // TODO(dartbug.com/40210): Allow this.
  FATAL("Use movzxw or movsxw instead.");
}

void Assembler::movw(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}

void Assembler::movw(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}

void Assembler::leal(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}

// Move if not overflow.
void Assembler::cmovno(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x41);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmove(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x44);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmovne(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x45);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmovs(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x48);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmovns(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x49);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmovgel(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x4D);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmovlessl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x4C);
  EmitRegisterOperand(dst, src);
}

void Assembler::rep_movsb() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA4);
}

void Assembler::rep_movsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x66);
  EmitUint8(0xA5);
}

void Assembler::rep_movsd() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA5);
}

void Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

void Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

void Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}

void Assembler::movd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}

void Assembler::movd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));
}

void Assembler::movq(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xD6);
  EmitOperand(src, Operand(dst));
}

void Assembler::movq(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(dst, Operand(src));
}

void Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

void Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

void Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

void Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}

void Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}

void Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}

void Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

void Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

void Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}

void Assembler::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::movups(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

void Assembler::movups(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

void Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

void Assembler::addpl(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xFE);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::subpl(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xFA);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::addps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::subps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::divps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::mulps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::minps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::maxps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::andps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

void Assembler::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::notps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_not_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF};
  xorps(dst, Address::Absolute(reinterpret_cast<uword>(&float_not_constant)));
}

void Assembler::negateps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant = {0x80000000, 0x80000000, 0x80000000, 0x80000000};
  xorps(dst,
        Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}

void Assembler::absps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_absolute_constant = {0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF};
  andps(dst,
        Address::Absolute(reinterpret_cast<uword>(&float_absolute_constant)));
}

void Assembler::zerowps(XmmRegister dst) {
  static const struct ALIGN16 {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_zerow_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000};
  andps(dst, Address::Absolute(reinterpret_cast<uword>(&float_zerow_constant)));
}

void Assembler::cmppseq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x0);
}

void Assembler::cmppsneq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x4);
}

void Assembler::cmppslt(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x1);
}

void Assembler::cmppsle(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x2);
}

void Assembler::cmppsnlt(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x5);
}

void Assembler::cmppsnle(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(0x6);
}

void Assembler::sqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}

void Assembler::rsqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x52);
  EmitXmmRegisterOperand(dst, dst);
}

void Assembler::reciprocalps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x53);
  EmitXmmRegisterOperand(dst, dst);
}

void Assembler::movhlps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x12);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::movlhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x16);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::unpcklps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::unpckhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::unpcklpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::unpckhpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::set1ps(XmmRegister dst, Register tmp1, const Immediate& imm) {
  // Load 32-bit immediate value into tmp1.
  movl(tmp1, imm);
  // Move value from tmp1 into dst.
  movd(dst, tmp1);
  // Broadcast low lane into other three lanes.
  shufps(dst, dst, Immediate(0x0));
}

void Assembler::shufps(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  ASSERT(imm.is_uint8());
  EmitUint8(imm.value());
}

void Assembler::addpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::negatepd(XmmRegister dst) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
  xorpd(dst,
        Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}

void Assembler::subpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::mulpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::divpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::abspd(XmmRegister dst) {
  static const struct ALIGN16 {
    uint64_t a;
    uint64_t b;
  } double_absolute_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(dst,
        Address::Absolute(reinterpret_cast<uword>(&double_absolute_constant)));
}

void Assembler::minpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::maxpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::sqrtpd(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}

void Assembler::cvtps2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvtpd2ps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::shufpd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  ASSERT(imm.is_uint8());
  EmitUint8(imm.value());
}

void Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

void Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

void Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}

void Assembler::cvtsi2ss(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

void Assembler::cvtsi2sd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

void Assembler::cvtss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvtsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvttss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvttsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}

void Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}

void Assembler::movmskpd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::movmskps(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::pmovmskb(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xD7);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

void Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

void Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

void Assembler::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::pextrd(Register dst, XmmRegister src, const Immediate& imm) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x16);
  EmitOperand(src, Operand(dst));
  ASSERT(imm.is_uint8());
  EmitUint8(imm.value());
}

void Assembler::pmovsxdq(XmmRegister dst, XmmRegister src) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x25);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::pcmpeqq(XmmRegister dst, XmmRegister src) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x29);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::pxor(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xEF);
  EmitXmmRegisterOperand(dst, src);
}

void Assembler::roundsd(XmmRegister dst, XmmRegister src, RoundingMode mode) {
  ASSERT(TargetCPUFeatures::sse4_1_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst, src);
  // Mask precision exception.
  EmitUint8(static_cast<uint8_t>(mode) | 0x8);
}

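// Note on the roundsd immediate: bits 1:0 select the rounding mode when bit 2
// is clear (setting bit 2 would defer to MXCSR.RC instead), and bit 3 masks
// the precision (inexact) exception, which is why 0x8 is OR'ed in above.
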
void Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}

void Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}

void Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}

void Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}

void Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}

void Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}

void Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}

void Assembler::filds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}

void Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}

void Assembler::ffree(intptr_t value) {
  ASSERT(value < 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + value);
}

void Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}

void Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}

void Assembler::fsincos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFB);
}

void Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}

void Assembler::xchgl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}

void Assembler::cmpw(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x81);
  EmitOperand(7, address);
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}

void Assembler::cmpb(const Address& address, const Immediate& imm) {
  ASSERT(imm.is_int8());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x80);
  EmitOperand(7, address);
  EmitUint8(imm.value() & 0xFF);
}

void Assembler::testl(Register reg1, Register reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}

void Assembler::testl(Register reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg);
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
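
// Encoding example (illustrative): testl(ECX, Immediate(0x10)) takes the
// byte-register branch and assembles to F6 C1 10 (test cl, 0x10), three bytes
// instead of the six-byte F7 C1 10 00 00 00. Testing only the low byte is
// sound because the zero-extended uint8 immediate has no bits set above
// bit 7.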

void Assembler::testl(const Address& address, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(0, address);
  EmitImmediate(immediate);
}

void Assembler::testl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitOperand(reg, address);
}

void Assembler::testb(const Address& address, const Immediate& imm) {
  ASSERT(imm.is_int8());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF6);
  EmitOperand(0, address);
  EmitUint8(imm.value() & 0xFF);
}

void Assembler::testb(const Address& address, ByteRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x84);
  EmitOperand(reg, address);
}

void Assembler::Alu(int bytes, uint8_t opcode, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 3);
  EmitUint8(opcode);
  EmitOperand(dst, Operand(src));
}

void Assembler::Alu(uint8_t modrm_opcode, Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(modrm_opcode, Operand(dst), imm);
}

void Assembler::Alu(int bytes,
                    uint8_t opcode,
                    Register dst,
                    const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 3);
  EmitUint8(opcode);
  EmitOperand(dst, src);
}

void Assembler::Alu(int bytes,
                    uint8_t opcode,
                    const Address& dst,
                    Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (bytes == 2) {
    EmitOperandSizeOverride();
  }
  ASSERT((opcode & 7) == 1);
  EmitUint8(opcode);
  EmitOperand(src, dst);
}

void Assembler::Alu(uint8_t modrm_opcode,
                    const Address& dst,
                    const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(modrm_opcode, dst, imm);
}

void Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}

void Assembler::idivl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(7, Operand(reg));
}

void Assembler::divl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(6, Operand(reg));
}

void Assembler::imull(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}

void Assembler::imull(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x69);
  EmitOperand(reg, Operand(reg));
  EmitImmediate(imm);
}

void Assembler::imull(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}

void Assembler::imull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}

void Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}

void Assembler::mull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}

void Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}

void Assembler::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}

void Assembler::incl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);
}

void Assembler::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}

void Assembler::decl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);
}

void Assembler::shll(Register reg, const Immediate& imm) {
  EmitGenericShift(4, reg, imm);
}

void Assembler::shll(Register operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}

void Assembler::shll(const Address& operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}

void Assembler::shrl(Register reg, const Immediate& imm) {
  EmitGenericShift(5, reg, imm);
}

void Assembler::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, Operand(operand), shifter);
}

void Assembler::sarl(Register reg, const Immediate& imm) {
  EmitGenericShift(7, reg, imm);
}

void Assembler::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, Operand(operand), shifter);
}

void Assembler::sarl(const Address& address, Register shifter) {
  EmitGenericShift(7, Operand(address), shifter);
}

void Assembler::shldl(Register dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitRegisterOperand(src, dst);
}

void Assembler::shldl(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(imm.is_int8());
  EmitUint8(0x0F);
  EmitUint8(0xA4);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}

void Assembler::shldl(const Address& operand, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitOperand(src, Operand(operand));
}

void Assembler::shrdl(Register dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitRegisterOperand(src, dst);
}

void Assembler::shrdl(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(imm.is_int8());
  EmitUint8(0x0F);
  EmitUint8(0xAC);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}

void Assembler::shrdl(const Address& dst, Register src, Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  ASSERT(shifter == ECX);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitOperand(src, Operand(dst));
}

void Assembler::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}

void Assembler::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);
}

void Assembler::bsfl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBC);
  EmitRegisterOperand(dst, src);
}

void Assembler::bsrl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}

void Assembler::popcntl(Register dst, Register src) {
  ASSERT(TargetCPUFeatures::popcnt_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xB8);
  EmitRegisterOperand(dst, src);
}

void Assembler::lzcntl(Register dst, Register src) {
  ASSERT(TargetCPUFeatures::abm_supported());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}

void Assembler::bt(Register base, Register offset) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA3);
  EmitRegisterOperand(offset, base);
}

void Assembler::bt(Register base, int bit) {
  ASSERT(bit >= 0 && bit < 32);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBA);
  EmitRegisterOperand(4, base);
  EmitUint8(bit);
}

void Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  ASSERT(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);
}
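
// The trailing 0x00 is ENTER's second operand, the lexical nesting level:
// the VM always emits "enter imm16, 0", which allocates imm16 bytes of
// locals without copying any outer frame pointers.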

void Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}

void Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}

void Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  ASSERT(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}

void Assembler::nop(int size) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There are nops up to size 15, but for now just provide up to size 8.
  ASSERT(0 < size && size <= MAX_NOP_SIZE);
  switch (size) {
    case 1:
      EmitUint8(0x90);
      break;
    case 2:
      EmitUint8(0x66);
      EmitUint8(0x90);
      break;
    case 3:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x00);
      break;
    case 4:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x40);
      EmitUint8(0x00);
      break;
    case 5:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x44);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 6:
      EmitUint8(0x66);
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x44);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 7:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x80);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    case 8:
      EmitUint8(0x0F);
      EmitUint8(0x1F);
      EmitUint8(0x84);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      EmitUint8(0x00);
      break;
    default:
      UNIMPLEMENTED();
  }
}
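
// These byte sequences are the multi-byte NOP forms recommended by the Intel
// optimization manuals (0F 1F /0 with progressively wider ModRM/SIB and
// displacement fields), so alignment padding stays cheap to decode.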

void Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}

void Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}

void Assembler::j(Condition condition, Label* label, JumpDistance distance) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    const int kShortSize = 2;
    const int kLongSize = 6;
    intptr_t offset = label->Position() - buffer_.Size();
    ASSERT(offset <= 0);
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else if (distance == kNearJump) {
    EmitUint8(0x70 + condition);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
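
// kShortSize/kLongSize are the lengths of the two Jcc encodings (70+cc rel8
// is 2 bytes; 0F 80+cc rel32 is 6 bytes). Because x86 branch displacements
// are relative to the next instruction, the backward offset to a bound label
// is adjusted down by the size of the jump being emitted.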

void Assembler::j(Condition condition, const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x80 + condition);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
}

void Assembler::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);
}

void Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}

void Assembler::jmp(Label* label, JumpDistance distance) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    const int kShortSize = 2;
    const int kLongSize = 5;
    intptr_t offset = label->Position() - buffer_.Size();
    ASSERT(offset <= 0);
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else if (distance == kNearJump) {
    EmitUint8(0xEB);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}

void Assembler::jmp(const ExternalLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE9);
  EmitFixup(new DirectCallRelocation());
  EmitInt32(label->address());
}

void Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
}

void Assembler::cmpxchgl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg, address);
}

void Assembler::cld() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFC);
}

void Assembler::std() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFD);
}

void Assembler::cpuid() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA2);
}

void Assembler::CompareRegisters(Register a, Register b) {
  cmpl(a, b);
}

void Assembler::Load(Register reg, const Address& address, OperandSize type) {
  switch (type) {
    case kByte:
      return movsxb(reg, address);
    case kUnsignedByte:
      return movzxb(reg, address);
    case kTwoBytes:
      return movsxw(reg, address);
    case kUnsignedTwoBytes:
      return movzxw(reg, address);
    case kUnsignedFourBytes:
    case kFourBytes:
      return movl(reg, address);
    default:
      UNREACHABLE();
      break;
  }
}

void Assembler::Store(Register reg, const Address& address, OperandSize sz) {
  switch (sz) {
    case kByte:
    case kUnsignedByte:
      return movb(address, reg);
    case kTwoBytes:
    case kUnsignedTwoBytes:
      return movw(address, reg);
    case kFourBytes:
    case kUnsignedFourBytes:
      return movl(address, reg);
    default:
      UNREACHABLE();
      break;
  }
}

void Assembler::Store(const Object& object, const Address& dst) {
  if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
    movl(dst, Immediate(target::ToRawPointer(object)));
  } else {
    DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0xC7);
    EmitOperand(0, dst);
    buffer_.EmitObject(object);
  }
}

void Assembler::ArithmeticShiftRightImmediate(Register reg, intptr_t shift) {
  sarl(reg, Immediate(shift));
}

void Assembler::CompareWords(Register reg1,
                             Register reg2,
                             intptr_t offset,
                             Register count,
                             Register temp,
                             Label* equals) {
  Label loop;
  Bind(&loop);
  decl(count);
  j(LESS, equals, Assembler::kNearJump);
  COMPILE_ASSERT(target::kWordSize == 4);
  movl(temp, FieldAddress(reg1, count, TIMES_4, offset));
  cmpl(temp, FieldAddress(reg2, count, TIMES_4, offset));
  j(EQUAL, &loop, Assembler::kNearJump);
}

void Assembler::LoadFromStack(Register dst, intptr_t depth) {
  ASSERT(depth >= 0);
  movl(dst, Address(ESP, depth * target::kWordSize));
}

void Assembler::StoreToStack(Register src, intptr_t depth) {
  ASSERT(depth >= 0);
  movl(Address(ESP, depth * target::kWordSize), src);
}

void Assembler::CompareToStack(Register src, intptr_t depth) {
  cmpl(src, Address(ESP, depth * target::kWordSize));
}

void Assembler::ExtendValue(Register to, Register from, OperandSize sz) {
  switch (sz) {
    case kUnsignedFourBytes:
    case kFourBytes:
      if (to == from) return;  // No operation needed.
      return movl(to, from);
    case kUnsignedTwoBytes:
      return movzxw(to, from);
    case kTwoBytes:
      return movsxw(to, from);
    case kUnsignedByte:
      switch (from) {
        case EAX:
        case EBX:
        case ECX:
        case EDX:
          return movzxb(to, ByteRegisterOf(from));
          break;
        default:
          if (to != from) {
            movl(to, from);
          }
          return andl(to, Immediate(0xFF));
      }
    case kByte:
      switch (from) {
        case EAX:
        case EBX:
        case ECX:
        case EDX:
          return movsxb(to, ByteRegisterOf(from));
          break;
        default:
          if (to != from) {
            movl(to, from);
          }
          shll(to, Immediate(24));
          return sarl(to, Immediate(24));
      }
    default:
      UNIMPLEMENTED();
      break;
  }
}

void Assembler::PushRegister(Register r) {
  pushl(r);
}

void Assembler::PopRegister(Register r) {
  popl(r);
}

void Assembler::PushRegistersInOrder(std::initializer_list<Register> regs) {
  for (Register reg : regs) {
    PushRegister(reg);
  }
}

void Assembler::AddImmediate(Register reg, const Immediate& imm) {
  const intptr_t value = imm.value();
  if (value == 0) {
    return;
  }
  if ((value > 0) || (value == kMinInt32)) {
    if (value == 1) {
      incl(reg);
    } else {
      addl(reg, imm);
    }
  } else {
    SubImmediate(reg, Immediate(-value));
  }
}

void Assembler::AddImmediate(Register dest, Register src, int32_t value) {
  if (dest == src) {
    AddImmediate(dest, value);
    return;
  }
  if (value == 0) {
    MoveRegister(dest, src);
    return;
  }
  leal(dest, Address(src, value));
}
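
// Using leal gives a three-operand add: dest = src + value in one
// instruction, without clobbering src and without touching EFLAGS (unlike
// addl).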

void Assembler::SubImmediate(Register reg, const Immediate& imm) {
  const intptr_t value = imm.value();
  if (value == 0) {
    return;
  }
  if ((value > 0) || (value == kMinInt32)) {
    if (value == 1) {
      decl(reg);
    } else {
      subl(reg, imm);
    }
  } else {
    AddImmediate(reg, Immediate(-value));
  }
}

void Assembler::AndRegisters(Register dst, Register src1, Register src2) {
  ASSERT(src1 != src2);  // Likely a mistake.
  if (src2 == kNoRegister) {
    src2 = dst;
  }
  if (dst == src2) {
    andl(dst, src1);
  } else if (dst == src1) {
    andl(dst, src2);
  } else {
    movl(dst, src1);
    andl(dst, src2);
  }
}

void Assembler::Drop(intptr_t stack_elements) {
  ASSERT(stack_elements >= 0);
  if (stack_elements > 0) {
    addl(ESP, Immediate(stack_elements * target::kWordSize));
  }
}

void Assembler::LoadIsolate(Register dst) {
  movl(dst, Address(THR, target::Thread::isolate_offset()));
}

void Assembler::LoadIsolateGroup(Register dst) {
  movl(dst, Address(THR, target::Thread::isolate_group_offset()));
}

void Assembler::LoadObject(Register dst,
                           const Object& object,
                           bool movable_referent) {
  ASSERT(IsOriginalObject(object));

  // movable_referent: some references to VM heap objects may be patched with
  // references to isolate-local objects (e.g., optimized static calls).
  // We need to track such references since the latter may move during
  // compaction.
  if (target::CanEmbedAsRawPointerInGeneratedCode(object) &&
      !movable_referent) {
    movl(dst, Immediate(target::ToRawPointer(object)));
  } else {
    DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0xB8 + dst);
    buffer_.EmitObject(object);
  }
}

void Assembler::LoadObjectSafely(Register dst, const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::IsSmi(object) && !IsSafeSmi(object)) {
    const int32_t cookie = jit_cookie();
    movl(dst, Immediate(target::ToRawSmi(object) ^ cookie));
    xorl(dst, Immediate(cookie));
  } else {
    LoadObject(dst, object);
  }
}
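
// The XOR round-trip keeps attacker-chosen Smi bit patterns from appearing
// verbatim as immediates in executable memory (a JIT-spraying mitigation):
// the code stream holds smi ^ cookie, and the xorl with the per-assembler
// cookie reconstructs the original value in dst at run time.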

void Assembler::PushObject(const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
    pushl(Immediate(target::ToRawPointer(object)));
  } else {
    DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    EmitUint8(0x68);
    buffer_.EmitObject(object);
  }
}

void Assembler::CompareObject(Register reg, const Object& object) {
  ASSERT(IsOriginalObject(object));
  if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
    cmpl(reg, Immediate(target::ToRawPointer(object)));
  } else {
    DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
    ASSERT(IsInOldSpace(object));
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    if (reg == EAX) {
      EmitUint8(0x05 + (7 << 3));
      buffer_.EmitObject(object);
    } else {
      EmitUint8(0x81);
      EmitOperand(7, Operand(reg));
      buffer_.EmitObject(object);
    }
  }
}

void Assembler::StoreBarrier(Register object,
                             Register value,
                             CanBeSmi can_be_smi,
                             Register scratch) {
  // x.slot = x. The barrier should have been removed at the IL level.
  ASSERT(object != value);

  bool spill_scratch = false;
  if (scratch == kNoRegister) {
    spill_scratch = true;
    if (object != EAX && value != EAX) {
      scratch = EAX;
    } else if (object != EBX && value != EBX) {
      scratch = EBX;
    } else {
      scratch = ECX;
    }
  }
  ASSERT(scratch != object);
  ASSERT(scratch != value);

  // In parallel, test whether
  //  - object is old and not remembered and value is new, or
  //  - object is old and value is old and not marked and concurrent marking is
  //    in progress
  // If so, call the WriteBarrier stub, which will either add object to the
  // store buffer (case 1) or add value to the marking stack (case 2).
  // Compare UntaggedObject::StorePointer.
  Label done;
  if (can_be_smi == kValueCanBeSmi) {
    BranchIfSmi(value, &done, kNearJump);
  } else {
#if defined(DEBUG)
    Label passed_check;
    BranchIfNotSmi(value, &passed_check, kNearJump);
    Breakpoint();
    Bind(&passed_check);
#endif
  }
  if (spill_scratch) {
    pushl(scratch);
  }
  movl(scratch, FieldAddress(object, target::Object::tags_offset()));
  shrl(scratch, Immediate(target::UntaggedObject::kBarrierOverlapShift));
  andl(scratch, Address(THR, target::Thread::write_barrier_mask_offset()));
  testl(FieldAddress(value, target::Object::tags_offset()), scratch);
  if (spill_scratch) {
    popl(scratch);
  }
  j(ZERO, &done, kNearJump);

  Register object_for_call = object;
  if (value != kWriteBarrierValueReg) {
    // Unlikely. Only non-graph intrinsics.
    // TODO(rmacnak): Shuffle registers in intrinsics.
    pushl(kWriteBarrierValueReg);
    if (object == kWriteBarrierValueReg) {
      COMPILE_ASSERT(EAX != kWriteBarrierValueReg);
      COMPILE_ASSERT(ECX != kWriteBarrierValueReg);
      object_for_call = (value == EAX) ? ECX : EAX;
      pushl(object_for_call);
      movl(object_for_call, object);
    }
    movl(kWriteBarrierValueReg, value);
  }
  call(Address(THR, target::Thread::write_barrier_wrappers_thread_offset(
                        object_for_call)));
  if (value != kWriteBarrierValueReg) {
    if (object == kWriteBarrierValueReg) {
      popl(object_for_call);
    }
    popl(kWriteBarrierValueReg);
  }
  Bind(&done);
}
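
// The three tag instructions above compute, in effect,
//   (object->tags >> kBarrierOverlapShift) & THR->write_barrier_mask
//       & value->tags,
// which is non-zero exactly when one of the two cases listed in the comment
// applies: the shift lines the object's header bits up against the
// corresponding bits in the value's header, folding both checks into a
// single testl.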

void Assembler::ArrayStoreBarrier(Register object,
                                  Register slot,
                                  Register value,
                                  CanBeSmi can_be_smi,
                                  Register scratch) {
  ASSERT(object != value);
  ASSERT(scratch != object);
  ASSERT(scratch != value);
  ASSERT(scratch != slot);

  // In parallel, test whether
  //  - object is old and not remembered and value is new, or
  //  - object is old and value is old and not marked and concurrent marking is
  //    in progress
  // If so, call the WriteBarrier stub, which will either add object to the
  // store buffer (case 1) or add value to the marking stack (case 2).
  // Compare UntaggedObject::StorePointer.
  Label done;
  if (can_be_smi == kValueCanBeSmi) {
    BranchIfSmi(value, &done, kNearJump);
  } else {
#if defined(DEBUG)
    Label passed_check;
    BranchIfNotSmi(value, &passed_check, kNearJump);
    Breakpoint();
    Bind(&passed_check);
#endif
  }
  movl(scratch, FieldAddress(object, target::Object::tags_offset()));
  shrl(scratch, Immediate(target::UntaggedObject::kBarrierOverlapShift));
  andl(scratch, Address(THR, target::Thread::write_barrier_mask_offset()));
  testl(FieldAddress(value, target::Object::tags_offset()), scratch);
  j(ZERO, &done, kNearJump);

  if ((object != kWriteBarrierObjectReg) || (value != kWriteBarrierValueReg) ||
      (slot != kWriteBarrierSlotReg)) {
    // Spill and shuffle unimplemented. Currently StoreIntoArray is only used
    // from StoreIndexInstr, which gets these exact registers from the register
    // allocator.
    UNIMPLEMENTED();
  }
  call(Address(THR, target::Thread::array_write_barrier_entry_point_offset()));
  Bind(&done);
}

2199
2200void Assembler::VerifyStoreNeedsNoWriteBarrier(Register object,
2201 Register value) {
2202 // We can't assert the incremental barrier is not needed here, only the
2203 // generational barrier. We sometimes omit the write barrier when 'value' is
2204 // a constant, but we don't eagerly mark 'value' and instead assume it is also
2205 // reachable via a constant pool, so it doesn't matter if it is not traced via
2206 // 'object'.
2207 Label done;
2208 BranchIfSmi(value, &done, kNearJump);
2209 testb(FieldAddress(value, target::Object::tags_offset()),
2210 Immediate(1 << target::UntaggedObject::kNewBit));
2211 j(ZERO, &done, kNearJump);
2212 testb(FieldAddress(object, target::Object::tags_offset()),
2213 Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
2214 j(ZERO, &done, kNearJump);
2215 Stop("Write barrier is required");
2216 Bind(&done);
2217}
2218
2219void Assembler::StoreObjectIntoObjectNoBarrier(Register object,
2220 const Address& dest,
2221 const Object& value,
2222 MemoryOrder memory_order,
2223 OperandSize size) {
2224 ASSERT_EQUAL(size, kFourBytes);
2225 ASSERT(IsOriginalObject(value));
2226 // Ignoring memory_order.
2227 // On intel stores have store-release behavior (i.e. stores are not
2228 // re-ordered with other stores).
2229 // We don't run TSAN on 32 bit systems.
2230 // Don't call StoreRelease here because we would have to load the immediate
2231 // into a temp register which causes spilling.
2232#if defined(TARGET_USES_THREAD_SANITIZER)
2233 if (memory_order == kRelease) {
2234 UNIMPLEMENTED();
2235 }
2236#endif
2237 if (target::CanEmbedAsRawPointerInGeneratedCode(value)) {
2238 Immediate imm_value(target::ToRawPointer(value));
2239 movl(dest, imm_value);
2240 } else {
2241 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2242 EmitUint8(0xC7);
2243 EmitOperand(0, dest);
2244 buffer_.EmitObject(value);
2245 }
2246 // No store buffer update.
2247}
2248
2249void Assembler::StoreInternalPointer(Register object,
2250 const Address& dest,
2251 Register value) {
2252 movl(dest, value);
2253}
2254
2255void Assembler::StoreIntoSmiField(const Address& dest, Register value) {
2256#if defined(DEBUG)
2257 Label done;
2258 testl(value, Immediate(kHeapObjectTag));
2259 j(ZERO, &done);
2260 Stop("New value must be Smi.");
2261 Bind(&done);
2262#endif // defined(DEBUG)
2263 movl(dest, value);
2264}
2265
2266void Assembler::ZeroInitSmiField(const Address& dest) {
2267 Immediate zero(target::ToRawSmi(0));
2268 movl(dest, zero);
2269}
2270
2271void Assembler::IncrementSmiField(const Address& dest, int32_t increment) {
2272 // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
2273 // the length of this instruction sequence.
2274 Immediate inc_imm(target::ToRawSmi(increment));
2275 addl(dest, inc_imm);
2276}
2277
2278void Assembler::LoadSImmediate(XmmRegister dst, float value) {
2279 int32_t constant = bit_cast<int32_t, float>(value);
2280 pushl(Immediate(constant));
2281 movss(dst, Address(ESP, 0));
2282 addl(ESP, Immediate(target::kWordSize));
2283}
2284
2285void Assembler::LoadDImmediate(XmmRegister dst, double value) {
2286 // TODO(5410843): Need to have a code constants table.
2287 int64_t constant = bit_cast<int64_t, double>(value);
2288 pushl(Immediate(Utils::High32Bits(constant)));
2289 pushl(Immediate(Utils::Low32Bits(constant)));
2290 movsd(dst, Address(ESP, 0));
2291 addl(ESP, Immediate(2 * target::kWordSize));
2292}
2293
2294void Assembler::LoadQImmediate(XmmRegister dst, simd128_value_t value) {
2295 // TODO(5410843): Need to have a code constants table.
2296 pushl(Immediate(value.int_storage[3]));
2297 pushl(Immediate(value.int_storage[2]));
2298 pushl(Immediate(value.int_storage[1]));
2299 pushl(Immediate(value.int_storage[0]));
2300 movups(dst, Address(ESP, 0));
2301 addl(ESP, Immediate(4 * target::kWordSize));
2302}
2303
2304void Assembler::FloatNegate(XmmRegister f) {
2305 static const struct ALIGN16 {
2306 uint32_t a;
2307 uint32_t b;
2308 uint32_t c;
2309 uint32_t d;
2310 } float_negate_constant = {0x80000000, 0x00000000, 0x80000000, 0x00000000};
2311 xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
2312}
2313
2314void Assembler::DoubleNegate(XmmRegister d) {
2315 static const struct ALIGN16 {
2316 uint64_t a;
2317 uint64_t b;
2318 } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
2319 xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
2320}
2321
2322void Assembler::DoubleAbs(XmmRegister reg) {
2323 static const struct ALIGN16 {
2324 uint64_t a;
2325 uint64_t b;
2326 } double_abs_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
2327 andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
2328}
2329
2330void Assembler::EnterFrame(intptr_t frame_size) {
2331 if (prologue_offset_ == -1) {
2332 Comment("PrologueOffset = %" Pd "", CodeSize());
2333 prologue_offset_ = CodeSize();
2334 }
2335#ifdef DEBUG
2336 intptr_t check_offset = CodeSize();
2337#endif
2338 pushl(EBP);
2339 movl(EBP, ESP);
2340#ifdef DEBUG
2341 ProloguePattern pp(CodeAddress(check_offset));
2342 ASSERT(pp.IsValid());
2343#endif
2344 if (frame_size != 0) {
2345 Immediate frame_space(frame_size);
2346 subl(ESP, frame_space);
2347 }
2348}
2349
2350void Assembler::LeaveFrame() {
2351 movl(ESP, EBP);
2352 popl(EBP);
2353}
2354
2355void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
2356 // Reserve space for arguments and align frame before entering
2357 // the C++ world.
2358 AddImmediate(ESP, Immediate(-frame_space));
2359 if (OS::ActivationFrameAlignment() > 1) {
2360 andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
2361 }
2362}
2363
2364void Assembler::EmitEntryFrameVerification() {
2365#if defined(DEBUG)
2366 Label ok;
2367 leal(EAX, Address(EBP, target::frame_layout.exit_link_slot_from_entry_fp *
2368 target::kWordSize));
2369 cmpl(EAX, ESP);
2370 j(EQUAL, &ok);
2371 Stop("target::frame_layout.exit_link_slot_from_entry_fp mismatch");
2372 Bind(&ok);
2373#endif
2374}
2375
2376// EBX receiver, ECX ICData entries array
2377// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
2378void Assembler::MonomorphicCheckedEntryJIT() {
2379 has_monomorphic_entry_ = true;
2380 intptr_t start = CodeSize();
2381 Label have_cid, miss;
2382 Bind(&miss);
2383 jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));
2384
2385 Comment("MonomorphicCheckedEntry");
2386 ASSERT(CodeSize() - start ==
2387 target::Instructions::kMonomorphicEntryOffsetJIT);
2388
2389 const intptr_t cid_offset = target::Array::element_offset(0);
2390 const intptr_t count_offset = target::Array::element_offset(1);
2391
2392 movl(EAX, Immediate(kSmiCid << 1));
2393 testl(EBX, Immediate(kSmiTagMask));
2394 j(ZERO, &have_cid, kNearJump);
2395 LoadClassId(EAX, EBX);
2396 SmiTag(EAX);
2397 Bind(&have_cid);
2398 // EAX: cid as Smi
2399
2400 cmpl(EAX, FieldAddress(ECX, cid_offset));
2401 j(NOT_EQUAL, &miss, Assembler::kNearJump);
2402 addl(FieldAddress(ECX, count_offset), Immediate(target::ToRawSmi(1)));
2403 xorl(EDX, EDX); // GC-safe for OptimizeInvokedFunction.
2404 nop(1);
2405
2406 // Fall through to unchecked entry.
2407 ASSERT(CodeSize() - start ==
2408 target::Instructions::kPolymorphicEntryOffsetJIT);
2409}
2410
2411// EBX receiver, ECX guarded cid as Smi.
2412// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
2413void Assembler::MonomorphicCheckedEntryAOT() {
2414 UNIMPLEMENTED();
2415}
2416
2417void Assembler::BranchOnMonomorphicCheckedEntryJIT(Label* label) {
2418 has_monomorphic_entry_ = true;
2419 while (CodeSize() < target::Instructions::kMonomorphicEntryOffsetJIT) {
2420 int3();
2421 }
2422 jmp(label);
2423 while (CodeSize() < target::Instructions::kPolymorphicEntryOffsetJIT) {
2424 int3();
2425 }
2426}
2427
2428void Assembler::CombineHashes(Register dst, Register other) {
2429 // hash += other_hash
2430 addl(dst, other);
2431 // hash += hash << 10
2432 movl(other, dst);
2433 shll(other, Immediate(10));
2434 addl(dst, other);
2435 // hash ^= hash >> 6
2436 movl(other, dst);
2437 shrl(other, Immediate(6));
2438 xorl(dst, other);
2439}
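// [Editor's aside, not part of this file] The combine step above, written as
// plain C++ for reference (a sketch of the same add/shift/xor mixing):
static uint32_t CombineHashesSketch(uint32_t hash, uint32_t other_hash) {
  hash += other_hash;
  hash += hash << 10;
  hash ^= hash >> 6;
  return hash;
}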
2440
2441void Assembler::FinalizeHashForSize(intptr_t bit_size,
2442 Register dst,
2443 Register scratch) {
2444 ASSERT(bit_size > 0); // Can't avoid returning 0 if there are no hash bits!
2445 // While any 32-bit hash value fits in X bits, where X > 32, the caller may
2446 // reasonably expect that the returned values fill the entire bit space.
2447 ASSERT(bit_size <= kBitsPerInt32);
2448 ASSERT(scratch != kNoRegister);
2449 // hash += hash << 3;
2450 movl(scratch, dst);
2451 shll(scratch, Immediate(3));
2452 addl(dst, scratch);
2453 // hash ^= hash >> 11; // Logical shift, unsigned hash.
2454 movl(scratch, dst);
2455 shrl(scratch, Immediate(11));
2456 xorl(dst, scratch);
2457 // hash += hash << 15;
2458 movl(scratch, dst);
2459 shll(scratch, Immediate(15));
2460 addl(dst, scratch);
2461 // Size to fit.
2462 if (bit_size < kBitsPerInt32) {
2463 andl(dst, Immediate(Utils::NBitMask(bit_size)));
2464 }
2465 // return (hash == 0) ? 1 : hash;
2466 Label done;
2467 j(NOT_ZERO, &done, kNearJump);
2468 incl(dst);
2469 Bind(&done);
2470}
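// [Editor's aside, not part of this file] The finalization above as plain
// C++, including the mask-to-size and the "never return 0" fixup (sketch):
static uint32_t FinalizeHashSketch(uint32_t hash, int bit_size) {
  hash += hash << 3;
  hash ^= hash >> 11;  // Logical shift on an unsigned hash.
  hash += hash << 15;
  if (bit_size < 32) {
    hash &= (1u << bit_size) - 1;  // Size to fit.
  }
  return (hash == 0) ? 1 : hash;  // Reserve 0 for "no hash computed".
}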
2471
2472void Assembler::EnterFullSafepoint(Register scratch) {
2473 // We generate the same number of instructions whether or not the slow-path is
2474 // forced. This simplifies GenerateJitCallbackTrampolines.
2475
2476 // Compare and swap the value at Thread::safepoint_state from unacquired
2477 // to acquired. On success, jump to 'success'; otherwise, fallthrough.
2478 Label done, slow_path;
2479 if (FLAG_use_slow_path) {
2480 jmp(&slow_path);
2481 }
2482
2483 pushl(EAX);
2484 movl(EAX, Immediate(target::Thread::full_safepoint_state_unacquired()));
2485 movl(scratch, Immediate(target::Thread::full_safepoint_state_acquired()));
2486 LockCmpxchgl(Address(THR, target::Thread::safepoint_state_offset()), scratch);
2487 movl(scratch, EAX);
2488 popl(EAX);
2489 cmpl(scratch, Immediate(target::Thread::full_safepoint_state_unacquired()));
2490
2491 if (!FLAG_use_slow_path) {
2492 j(EQUAL, &done);
2493 }
2494
2495 Bind(&slow_path);
2496 movl(scratch, Address(THR, target::Thread::enter_safepoint_stub_offset()));
2497 movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
2498 call(scratch);
2499
2500 Bind(&done);
2501}
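// [Editor's aside, not part of this file] The lock cmpxchg above performs a
// compare-and-swap on Thread::safepoint_state; in pseudo-C++ (illustrative
// names only):
//
//   if (thread->safepoint_state == kUnacquired) {  // expected value in EAX
//     thread->safepoint_state = kAcquired;         // swapped in atomically
//   } else {
//     call(enter_safepoint_stub);                  // contended slow path
//   }
//
// EAX is pushed and popped around the sequence because cmpxchg implicitly
// uses EAX for both the expected and the observed value.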
2502
2503void Assembler::TransitionGeneratedToNative(Register destination_address,
2504 Register new_exit_frame,
2505 Register new_exit_through_ffi,
2506 bool enter_safepoint) {
2507 // Save exit frame information to enable stack walking.
2508 movl(Address(THR, target::Thread::top_exit_frame_info_offset()),
2509 new_exit_frame);
2510
2511 movl(compiler::Address(THR,
2512 compiler::target::Thread::exit_through_ffi_offset()),
2513 new_exit_through_ffi);
2514 Register scratch = new_exit_through_ffi;
2515
2516 // Mark that the thread is executing native code.
2517 movl(VMTagAddress(), destination_address);
2518 movl(Address(THR, target::Thread::execution_state_offset()),
2519 Immediate(target::Thread::native_execution_state()));
2520
2521 if (enter_safepoint) {
2522 EnterFullSafepoint(scratch);
2523 }
2524}
2525
2526void Assembler::ExitFullSafepoint(Register scratch,
2527 bool ignore_unwind_in_progress) {
2528 ASSERT(scratch != EAX);
2529 // We generate the same number of instructions whether or not the slow-path is
2530 // forced, for consistency with EnterFullSafepoint.
2531
2532 // Compare and swap the value at Thread::safepoint_state from acquired
2533 // to unacquired. On success, jump to 'success'; otherwise, fallthrough.
2534 Label done, slow_path;
2535 if (FLAG_use_slow_path) {
2536 jmp(&slow_path);
2537 }
2538
2539 pushl(EAX);
2540 movl(EAX, Immediate(target::Thread::full_safepoint_state_acquired()));
2541 movl(scratch, Immediate(target::Thread::full_safepoint_state_unacquired()));
2542 LockCmpxchgl(Address(THR, target::Thread::safepoint_state_offset()), scratch);
2543 movl(scratch, EAX);
2544 popl(EAX);
2545 cmpl(scratch, Immediate(target::Thread::full_safepoint_state_acquired()));
2546
2547 if (!FLAG_use_slow_path) {
2548 j(EQUAL, &done);
2549 }
2550
2551 Bind(&slow_path);
2552 if (ignore_unwind_in_progress) {
2553 movl(scratch,
2554 Address(THR,
2555 target::Thread::
2556 exit_safepoint_ignore_unwind_in_progress_stub_offset()));
2557 } else {
2558 movl(scratch, Address(THR, target::Thread::exit_safepoint_stub_offset()));
2559 }
2560 movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
2561 call(scratch);
2562
2563 Bind(&done);
2564}
2565
2566void Assembler::TransitionNativeToGenerated(Register scratch,
2567 bool exit_safepoint,
2568 bool ignore_unwind_in_progress) {
2569 if (exit_safepoint) {
2570 ExitFullSafepoint(scratch, ignore_unwind_in_progress);
2571 } else {
2572 // flag only makes sense if we are leaving safepoint
2573 ASSERT(!ignore_unwind_in_progress);
2574#if defined(DEBUG)
2575 // Ensure we've already left the safepoint.
2576 movl(scratch, Address(THR, target::Thread::safepoint_state_offset()));
2577 andl(scratch, Immediate(target::Thread::full_safepoint_state_acquired()));
2578 Label ok;
2579 j(ZERO, &ok);
2580 Breakpoint();
2581 Bind(&ok);
2582#endif
2583 }
2584
2585 // Mark that the thread is executing Dart code.
2586 movl(Assembler::VMTagAddress(), Immediate(target::Thread::vm_tag_dart_id()));
2587 movl(Address(THR, target::Thread::execution_state_offset()),
2588 Immediate(target::Thread::generated_execution_state()));
2589
2590 // Reset exit frame information in Isolate's mutator thread structure.
2591 movl(Address(THR, target::Thread::top_exit_frame_info_offset()),
2592 Immediate(0));
2593 movl(compiler::Address(THR,
2594 compiler::target::Thread::exit_through_ffi_offset()),
2595 compiler::Immediate(0));
2596}
2597
2598static constexpr intptr_t kNumberOfVolatileCpuRegisters = 3;
2599static const Register volatile_cpu_registers[kNumberOfVolatileCpuRegisters] = {
2600 EAX, ECX, EDX};
2601
2602void Assembler::CallRuntime(const RuntimeEntry& entry,
2603 intptr_t argument_count) {
2604 ASSERT(!entry.is_leaf());
2605 // Argument count is not checked here, but in the runtime entry for a more
2606 // informative error message.
2607 movl(ECX, compiler::Address(THR, entry.OffsetFromThread()));
2608 movl(EDX, compiler::Immediate(argument_count));
2609 call(Address(THR, target::Thread::call_to_runtime_entry_point_offset()));
2610}
2611
2612#define __ assembler_->
2613
2614LeafRuntimeScope::LeafRuntimeScope(Assembler* assembler,
2615 intptr_t frame_size,
2616 bool preserve_registers)
2617 : assembler_(assembler), preserve_registers_(preserve_registers) {
2618 __ Comment("EnterCallRuntimeFrame");
2619 __ EnterFrame(0);
2620
2621 if (preserve_registers_) {
2622 // Preserve volatile CPU registers.
2623 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) {
2624 __ pushl(volatile_cpu_registers[i]);
2625 }
2626
2627 // Preserve all XMM registers.
2628 __ subl(ESP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize));
2629 // Store XMM registers with the lowest register number at the lowest
2630 // address.
2631 intptr_t offset = 0;
2632 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
2633 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
2634 __ movups(Address(ESP, offset), xmm_reg);
2635 offset += kFpuRegisterSize;
2636 }
2637 } else {
2638 // These registers must always be preserved.
2639 COMPILE_ASSERT(IsCalleeSavedRegister(THR));
2640 }
2641
2642 __ ReserveAlignedFrameSpace(frame_size);
2643}
2644
2645void LeafRuntimeScope::Call(const RuntimeEntry& entry,
2646 intptr_t argument_count) {
2647 ASSERT(argument_count == entry.argument_count());
2648 __ movl(EAX, compiler::Address(THR, entry.OffsetFromThread()));
2649 __ movl(compiler::Assembler::VMTagAddress(), EAX);
2650 __ call(EAX);
2651 __ movl(compiler::Assembler::VMTagAddress(),
2652 compiler::Immediate(VMTag::kDartTagId));
2653}
2654
2655LeafRuntimeScope::~LeafRuntimeScope() {
2656 if (preserve_registers_) {
2657 // ESP might have been modified to reserve space for arguments
2658 // and ensure proper alignment of the stack frame.
2659 // We need to restore it before restoring registers.
2660 const intptr_t kPushedRegistersSize =
2661 kNumberOfVolatileCpuRegisters * target::kWordSize +
2662 kNumberOfXmmRegisters * kFpuRegisterSize;
2663 __ leal(ESP, Address(EBP, -kPushedRegistersSize));
2664
2665 // Restore all XMM registers.
2666 // XMM registers have the lowest register number at the lowest address.
2667 intptr_t offset = 0;
2668 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
2669 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
2670 __ movups(xmm_reg, Address(ESP, offset));
2671 offset += kFpuRegisterSize;
2672 }
2673 __ addl(ESP, Immediate(offset));
2674
2675 // Restore volatile CPU registers.
2676 for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) {
2677 __ popl(volatile_cpu_registers[i]);
2678 }
2679 }
2680
2681 __ leave();
2682}
2683
2684void Assembler::Call(const Code& target,
2685 bool movable_target,
2686 CodeEntryKind entry_kind) {
2687 LoadObject(CODE_REG, ToObject(target), movable_target);
2688 call(FieldAddress(CODE_REG, target::Code::entry_point_offset(entry_kind)));
2689}
2690
2691void Assembler::CallVmStub(const Code& target) {
2692 const Object& target_as_object = CastHandle<Object, Code>(target);
2693 ASSERT(target::CanEmbedAsRawPointerInGeneratedCode(target_as_object));
2694 call(Address::Absolute(
2695 target::ToRawPointer(target_as_object) +
2696 target::Code::entry_point_offset(CodeEntryKind::kNormal) -
2697 kHeapObjectTag));
2698}
2699
2700void Assembler::Jmp(const Code& target) {
2701 const ExternalLabel label(target::Code::EntryPointOf(target));
2702 jmp(&label);
2703}
2704
2705void Assembler::J(Condition condition, const Code& target) {
2706 const ExternalLabel label(target::Code::EntryPointOf(target));
2707 j(condition, &label);
2708}
2709
2710void Assembler::Align(intptr_t alignment, intptr_t offset) {
2711 ASSERT(Utils::IsPowerOfTwo(alignment));
2712 intptr_t pos = offset + buffer_.GetPosition();
2713 intptr_t mod = pos & (alignment - 1);
2714 if (mod == 0) {
2715 return;
2716 }
2717 intptr_t bytes_needed = alignment - mod;
2718 while (bytes_needed > MAX_NOP_SIZE) {
2719 nop(MAX_NOP_SIZE);
2720 bytes_needed -= MAX_NOP_SIZE;
2721 }
2722 if (bytes_needed != 0) {
2723 nop(bytes_needed);
2724 }
2725 ASSERT(((offset + buffer_.GetPosition()) & (alignment - 1)) == 0);
2726}
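// [Editor's aside, not part of this file] A sketch of the padding arithmetic
// used by Align above; longer runs are split into MAX_NOP_SIZE chunks first:
static int NopBytesNeeded(int position, int alignment) {
  int mod = position & (alignment - 1);  // alignment must be a power of two
  return (mod == 0) ? 0 : alignment - mod;
}
// e.g. NopBytesNeeded(13, 16) == 3: one 3-byte NOP brings the buffer to 16.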
2727
2728void Assembler::Bind(Label* label) {
2729 intptr_t bound = buffer_.Size();
2730 ASSERT(!label->IsBound()); // Labels can only be bound once.
2731 while (label->IsLinked()) {
2732 intptr_t position = label->LinkPosition();
2733 intptr_t next = buffer_.Load<int32_t>(position);
2734 buffer_.Store<int32_t>(position, bound - (position + 4));
2735 label->position_ = next;
2736 }
2737 while (label->HasNear()) {
2738 intptr_t position = label->NearPosition();
2739 intptr_t offset = bound - (position + 1);
2740 ASSERT(Utils::IsInt(8, offset));
2741 buffer_.Store<int8_t>(position, offset);
2742 }
2743 label->BindTo(bound);
2744}
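// [Editor's aside, not part of this file] Bind resolves forward references
// by walking a linked list threaded through the code buffer itself: each
// unresolved 32-bit displacement slot stores the position of the previous
// use of the label. Patching writes bound - (position + 4), the PC-relative
// displacement as seen after the CPU has consumed the 4-byte field; near
// (8-bit) fixups are patched the same way with bound - (position + 1).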
2745
2746void Assembler::MoveMemoryToMemory(Address dst, Address src, Register tmp) {
2747 movl(tmp, src);
2748 movl(dst, tmp);
2749}
2750
2751#ifndef PRODUCT
2752void Assembler::MaybeTraceAllocation(intptr_t cid,
2753 Label* trace,
2754 Register temp_reg,
2755 JumpDistance distance) {
2756 ASSERT(cid > 0);
2757 Address state_address(kNoRegister, 0);
2758
2759 ASSERT(temp_reg != kNoRegister);
2760 LoadIsolateGroup(temp_reg);
2761 movl(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
2762 movl(temp_reg,
2763 Address(temp_reg,
2764 target::ClassTable::allocation_tracing_state_table_offset()));
2765 cmpb(Address(temp_reg,
2766 target::ClassTable::AllocationTracingStateSlotOffsetFor(cid)),
2767 Immediate(0));
2768 // We are tracing for this class, jump to the trace label which will use
2769 // the allocation stub.
2770 j(NOT_ZERO, trace, distance);
2771}
2772#endif // !PRODUCT
2773
2774void Assembler::TryAllocateObject(intptr_t cid,
2775 intptr_t instance_size,
2776 Label* failure,
2777 JumpDistance distance,
2778 Register instance_reg,
2779 Register temp_reg) {
2780 ASSERT(failure != nullptr);
2781 ASSERT(instance_size != 0);
2782 ASSERT(Utils::IsAligned(instance_size,
2783 target::ObjectAlignment::kObjectAlignment));
2784 if (FLAG_inline_alloc &&
2785 target::Heap::IsAllocatableInNewSpace(instance_size)) {
2786 // If this allocation is traced, program will jump to failure path
2787 // (i.e. the allocation stub) which will allocate the object and trace the
2788 // allocation call site.
2789 NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, temp_reg, distance));
2790 movl(instance_reg, Address(THR, target::Thread::top_offset()));
2791 addl(instance_reg, Immediate(instance_size));
2792 // instance_reg: potential next object start.
2793 cmpl(instance_reg, Address(THR, target::Thread::end_offset()));
2794 j(ABOVE_EQUAL, failure, distance);
2795 CheckAllocationCanary(instance_reg);
2796 // Successfully allocated the object, now update top to point to
2797 // next object start and store the class in the class field of object.
2798 movl(Address(THR, target::Thread::top_offset()), instance_reg);
2799 ASSERT(instance_size >= kHeapObjectTag);
2800 subl(instance_reg, Immediate(instance_size - kHeapObjectTag));
2801 const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
2802 movl(FieldAddress(instance_reg, target::Object::tags_offset()),
2803 Immediate(tags));
2804 } else {
2805 jmp(failure);
2806 }
2807}
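// [Editor's aside, not part of this file] The fast path above is a
// thread-local bump allocation; roughly, with illustrative names:
//
//   uword next = thread->top + instance_size;
//   if (next >= thread->end) goto failure;  // take the allocation stub
//   thread->top = next;
//   instance = (next - instance_size) + kHeapObjectTag;  // tagged pointer
//   instance->tags = MakeTagWordForNewSpaceObject(cid, instance_size);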
2808
2809void Assembler::TryAllocateArray(intptr_t cid,
2810 intptr_t instance_size,
2811 Label* failure,
2812 JumpDistance distance,
2813 Register instance,
2814 Register end_address,
2815 Register temp_reg) {
2816 ASSERT(failure != nullptr);
2817 ASSERT(temp_reg != kNoRegister);
2818 if (FLAG_inline_alloc &&
2819 target::Heap::IsAllocatableInNewSpace(instance_size)) {
2820 // If this allocation is traced, program will jump to failure path
2821 // (i.e. the allocation stub) which will allocate the object and trace the
2822 // allocation call site.
2823 NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, temp_reg, distance));
2824 movl(instance, Address(THR, target::Thread::top_offset()));
2825 movl(end_address, instance);
2826
2827 addl(end_address, Immediate(instance_size));
2828 j(CARRY, failure);
2829
2830 // Check if the allocation fits into the remaining space.
2831 // EAX: potential new object start.
2832 // EBX: potential next object start.
2833 cmpl(end_address, Address(THR, target::Thread::end_offset()));
2834 j(ABOVE_EQUAL, failure);
2835 CheckAllocationCanary(instance);
2836
2837 // Successfully allocated the object(s), now update top to point to
2838 // next object start and initialize the object.
2839 movl(Address(THR, target::Thread::top_offset()), end_address);
2840 addl(instance, Immediate(kHeapObjectTag));
2841
2842 // Initialize the tags.
2843 const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
2844 movl(FieldAddress(instance, target::Object::tags_offset()),
2845 Immediate(tags));
2846 } else {
2847 jmp(failure);
2848 }
2849}
2850
2851void Assembler::CopyMemoryWords(Register src,
2852 Register dst,
2853 Register size,
2854 Register temp) {
2855 // This loop is equivalent to
2856 // shrl(size, Immediate(target::kWordSizeLog2));
2857 // rep_movsd();
2858 // but shows better performance on certain micro-benchmarks.
2859 Label loop, done;
2860 cmpl(size, Immediate(0));
2861 j(EQUAL, &done, kNearJump);
2862 Bind(&loop);
2863 movl(temp, Address(src, 0));
2864 addl(src, Immediate(target::kWordSize));
2865 movl(Address(dst, 0), temp);
2866 addl(dst, Immediate(target::kWordSize));
2867 subl(size, Immediate(target::kWordSize));
2868 j(NOT_ZERO, &loop, kNearJump);
2869 Bind(&done);
2870}
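// [Editor's aside, not part of this file] The copy loop above as plain C++
// (sketch; size is in bytes and must be a multiple of the word size):
static void CopyMemoryWordsSketch(const uint32_t* src,
                                  uint32_t* dst,
                                  intptr_t size) {
  while (size != 0) {
    *dst++ = *src++;
    size -= sizeof(uint32_t);
  }
}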
2871
2872void Assembler::PushCodeObject() {
2873 DEBUG_ASSERT(IsNotTemporaryScopedHandle(code_));
2874 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2875 EmitUint8(0x68);
2876 buffer_.EmitObject(code_);
2877}
2878
2879void Assembler::EnterDartFrame(intptr_t frame_size) {
2880 EnterFrame(0);
2881
2882 PushCodeObject();
2883
2884 if (frame_size != 0) {
2885 subl(ESP, Immediate(frame_size));
2886 }
2887}
2888
2889void Assembler::LeaveDartFrame() {
2890 LeaveFrame();
2891}
2892
2893// On entry to a function compiled for OSR, the caller's frame pointer, the
2894// stack locals, and any copied parameters are already in place. The frame
2895// pointer is already set up. There may be extra space for spill slots to
2896// allocate.
2897void Assembler::EnterOsrFrame(intptr_t extra_size) {
2898 Comment("EnterOsrFrame");
2899 if (prologue_offset_ == -1) {
2900 Comment("PrologueOffset = %" Pd "", CodeSize());
2901 prologue_offset_ = CodeSize();
2902 }
2903
2904 if (extra_size != 0) {
2905 subl(ESP, Immediate(extra_size));
2906 }
2907}
2908
2909void Assembler::EnterStubFrame() {
2910 EnterDartFrame(0);
2911}
2912
2913void Assembler::LeaveStubFrame() {
2914 LeaveDartFrame();
2915}
2916
2917void Assembler::EnterCFrame(intptr_t frame_space) {
2918 // Already saved.
2919 COMPILE_ASSERT(IsCalleeSavedRegister(THR));
2920
2921 EnterFrame(0);
2922 ReserveAlignedFrameSpace(frame_space);
2923}
2924
2925void Assembler::LeaveCFrame() {
2926 LeaveFrame();
2927}
2928
2929void Assembler::EmitOperand(int rm, const Operand& operand) {
2930 ASSERT(rm >= 0 && rm < 8);
2931 const intptr_t length = operand.length_;
2932 ASSERT(length > 0);
2933 // Emit the ModRM byte updated with the given RM value.
2934 ASSERT((operand.encoding_[0] & 0x38) == 0);
2935 EmitUint8(operand.encoding_[0] + (rm << 3));
2936 // Emit the rest of the encoded operand.
2937 for (intptr_t i = 1; i < length; i++) {
2938 EmitUint8(operand.encoding_[i]);
2939 }
2940}
2941
2942void Assembler::EmitImmediate(const Immediate& imm) {
2943 EmitInt32(imm.value());
2944}
2945
2946void Assembler::EmitComplex(int rm,
2947 const Operand& operand,
2948 const Immediate& immediate) {
2949 ASSERT(rm >= 0 && rm < 8);
2950 if (immediate.is_int8()) {
2951 // Use sign-extended 8-bit immediate.
2952 EmitUint8(0x83);
2953 EmitOperand(rm, operand);
2954 EmitUint8(immediate.value() & 0xFF);
2955 } else if (operand.IsRegister(EAX)) {
2956 // Use short form if the destination is eax.
2957 EmitUint8(0x05 + (rm << 3));
2958 EmitImmediate(immediate);
2959 } else {
2960 EmitUint8(0x81);
2961 EmitOperand(rm, operand);
2962 EmitImmediate(immediate);
2963 }
2964}
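// [Editor's aside, not part of this file] EmitComplex picks the shortest x86
// encoding for an ALU op with an immediate; e.g. for addl (rm == 0):
//
//   addl ECX, 5     -> 83 C1 05           (sign-extended 8-bit immediate)
//   addl EAX, 1000  -> 05 E8 03 00 00     (short form reserved for EAX)
//   addl ECX, 1000  -> 81 C1 E8 03 00 00  (full 32-bit immediate)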
2965
2966void Assembler::EmitLabel(Label* label, intptr_t instruction_size) {
2967 if (label->IsBound()) {
2968 intptr_t offset = label->Position() - buffer_.Size();
2969 ASSERT(offset <= 0);
2970 EmitInt32(offset - instruction_size);
2971 } else {
2972 EmitLabelLink(label);
2973 }
2974}
2975
2976void Assembler::EmitLabelLink(Label* label) {
2977 ASSERT(!label->IsBound());
2978 intptr_t position = buffer_.Size();
2979 EmitInt32(label->position_);
2980 label->LinkTo(position);
2981}
2982
2983void Assembler::EmitNearLabelLink(Label* label) {
2984 ASSERT(!label->IsBound());
2985 intptr_t position = buffer_.Size();
2986 EmitUint8(0);
2987 label->NearLinkTo(position);
2988}
2989
2990void Assembler::EmitGenericShift(int rm, Register reg, const Immediate& imm) {
2991 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2992 ASSERT(imm.is_int8());
2993 if (imm.value() == 1) {
2994 EmitUint8(0xD1);
2995 EmitOperand(rm, Operand(reg));
2996 } else {
2997 EmitUint8(0xC1);
2998 EmitOperand(rm, Operand(reg));
2999 EmitUint8(imm.value() & 0xFF);
3000 }
3001}
3002
3003void Assembler::EmitGenericShift(int rm,
3004 const Operand& operand,
3005 Register shifter) {
3006 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
3007 ASSERT(shifter == ECX);
3008 EmitUint8(0xD3);
3009 EmitOperand(rm, Operand(operand));
3010}
3011
3012void Assembler::LoadClassId(Register result, Register object) {
3013 ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3014 ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3015 movl(result, FieldAddress(object, target::Object::tags_offset()));
3016 shrl(result, Immediate(target::UntaggedObject::kClassIdTagPos));
3017}
3018
3019void Assembler::LoadClassById(Register result, Register class_id) {
3020 ASSERT(result != class_id);
3021
3022 const intptr_t table_offset =
3023 target::IsolateGroup::cached_class_table_table_offset();
3024 LoadIsolateGroup(result);
3025 movl(result, Address(result, table_offset));
3026 movl(result, Address(result, class_id, TIMES_4, 0));
3027}
3028
3029void Assembler::CompareClassId(Register object,
3030 intptr_t class_id,
3031 Register scratch) {
3032 LoadClassId(scratch, object);
3033 cmpl(scratch, Immediate(class_id));
3034}
3035
3036void Assembler::SmiUntagOrCheckClass(Register object,
3037 intptr_t class_id,
3038 Register scratch,
3039 Label* is_smi) {
3040 ASSERT(kSmiTagShift == 1);
3041 ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3042 ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3043 // Untag optimistically. Tag bit is shifted into the CARRY.
3044 SmiUntag(object);
3045 j(NOT_CARRY, is_smi, kNearJump);
3046 // Load cid: can't use LoadClassId, object is untagged. Use TIMES_2 scale
3047 // factor in the addressing mode to compensate for this.
3048 movl(scratch, Address(object, TIMES_2,
3049 target::Object::tags_offset() + kHeapObjectTag));
3050 shrl(scratch, Immediate(target::UntaggedObject::kClassIdTagPos));
3051 cmpl(scratch, Immediate(class_id));
3052}
3053
3054void Assembler::LoadClassIdMayBeSmi(Register result, Register object) {
3055 if (result == object) {
3056 Label smi, join;
3057
3058 testl(object, Immediate(kSmiTagMask));
3059 j(EQUAL, &smi, Assembler::kNearJump);
3060 LoadClassId(result, object);
3061 jmp(&join, Assembler::kNearJump);
3062
3063 Bind(&smi);
3064 movl(result, Immediate(kSmiCid));
3065
3066 Bind(&join);
3067 } else {
3068 ASSERT(result != object);
3069 static const intptr_t kSmiCidSource =
3070 kSmiCid << target::UntaggedObject::kClassIdTagPos;
3071
3072 // Make a dummy "Object" whose cid is kSmiCid.
3073 movl(result, Immediate(reinterpret_cast<int32_t>(&kSmiCidSource) + 1));
3074
3075 // Check if object (in tmp) is a Smi.
3076 testl(object, Immediate(kSmiTagMask));
3077
3078 // If the object is not a Smi, use the original object to load the cid.
3079 // Otherwise, the dummy object is used, and the result is kSmiCid.
3080 cmovne(result, object);
3081 LoadClassId(result, result);
3082 }
3083}
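// [Editor's aside, not part of this file] The branch-free path above builds
// a fake tagged pointer: &kSmiCidSource + 1 sets the heap-object tag bit,
// and the word it points at decodes to class id kSmiCid. cmovne then selects
// the real object only when the Smi test fails, so a single LoadClassId
// serves both the Smi and the heap-object case without a branch.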
3084
3085void Assembler::LoadTaggedClassIdMayBeSmi(Register result, Register object) {
3086 if (result == object) {
3087 Label smi, join;
3088
3089 testl(object, Immediate(kSmiTagMask));
3090 j(EQUAL, &smi, Assembler::kNearJump);
3091 LoadClassId(result, object);
3092 SmiTag(result);
3093 jmp(&join, Assembler::kNearJump);
3094
3095 Bind(&smi);
3096 movl(result, Immediate(target::ToRawSmi(kSmiCid)));
3097
3098 Bind(&join);
3099 } else {
3100 LoadClassIdMayBeSmi(result, object);
3101 SmiTag(result);
3102 }
3103}
3104
3105void Assembler::EnsureHasClassIdInDEBUG(intptr_t cid,
3106 Register src,
3107 Register scratch,
3108 bool can_be_null) {
3109#if defined(DEBUG)
3110 Comment("Check that object in register has cid %" Pd "", cid);
3111 Label matches;
3112 LoadClassIdMayBeSmi(scratch, src);
3113 CompareImmediate(scratch, cid);
3114 BranchIf(EQUAL, &matches, Assembler::kNearJump);
3115 if (can_be_null) {
3116 CompareImmediate(scratch, kNullCid);
3117 BranchIf(EQUAL, &matches, Assembler::kNearJump);
3118 }
3119 Breakpoint();
3120 Bind(&matches);
3121#endif
3122}
3123
3124bool Assembler::AddressCanHoldConstantIndex(const Object& constant,
3125 bool is_external,
3126 intptr_t cid,
3127 intptr_t index_scale) {
3128 if (!IsSafeSmi(constant)) return false;
3129 const int64_t index = target::SmiValue(constant);
3130 const int64_t offset =
3131 is_external ? 0 : (target::Instance::DataOffsetFor(cid) - kHeapObjectTag);
3132 const int64_t disp = index * index_scale + offset;
3133 return Utils::IsInt(32, disp);
3134}
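// [Editor's aside, not part of this file] Example of the displacement check
// above: for an internal (non-external) access with data offset D and Smi
// index i scaled by s, disp = i * s + (D - kHeapObjectTag); the constant
// index is usable only if disp fits the signed 32-bit displacement field of
// an IA32 addressing mode.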
3135
3136Address Assembler::ElementAddressForIntIndex(bool is_external,
3137 intptr_t cid,
3138 intptr_t index_scale,
3139 Register array,
3140 intptr_t index,
3141 intptr_t extra_disp) {
3142 if (is_external) {
3143 return Address(array, index * index_scale + extra_disp);
3144 } else {
3145 const int64_t disp = static_cast<int64_t>(index) * index_scale +
3146 target::Instance::DataOffsetFor(cid) + extra_disp;
3147 ASSERT(Utils::IsInt(32, disp));
3148 return FieldAddress(array, static_cast<int32_t>(disp));
3149 }
3150}
3151
3152Address Assembler::ElementAddressForRegIndex(bool is_external,
3153 intptr_t cid,
3154 intptr_t index_scale,
3155 bool index_unboxed,
3156 Register array,
3157 Register index,
3158 intptr_t extra_disp) {
3159 if (is_external) {
3160 return Address(array, index, ToScaleFactor(index_scale, index_unboxed),
3161 extra_disp);
3162 } else {
3163 return FieldAddress(array, index, ToScaleFactor(index_scale, index_unboxed),
3164 target::Instance::DataOffsetFor(cid) + extra_disp);
3165 }
3166}
3167
3168void Assembler::RangeCheck(Register value,
3169 Register temp,
3170 intptr_t low,
3171 intptr_t high,
3172 RangeCheckCondition condition,
3173 Label* target) {
3174 auto cc = condition == kIfInRange ? BELOW_EQUAL : ABOVE;
3175 Register to_check = value;
3176 if (temp != kNoRegister) {
3177 movl(temp, value);
3178 to_check = temp;
3179 }
3180 subl(to_check, Immediate(low));
3181 cmpl(to_check, Immediate(high - low));
3182 j(cc, target);
3183}
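// [Editor's aside, not part of this file] RangeCheck uses the standard
// unsigned subtract-and-compare trick; as plain C++ (sketch):
static bool InRangeSketch(uint32_t value, uint32_t low, uint32_t high) {
  // Values below `low` wrap to large unsigned numbers after the subtract,
  // so one unsigned comparison tests low <= value && value <= high.
  return (value - low) <= (high - low);
}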
3184
3185} // namespace compiler
3186} // namespace dart
3187
3188#endif // defined(TARGET_ARCH_IA32)