Flutter Engine
The Flutter Engine
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
assembler_ia32.cc
Go to the documentation of this file.
1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/globals.h" // NOLINT
6#if defined(TARGET_ARCH_IA32)
7
8#define SHOULD_NOT_INCLUDE_RUNTIME
9
10#include "vm/class_id.h"
13#include "vm/cpu.h"
14#include "vm/instructions.h"
15#include "vm/tags.h"
16
17namespace dart {
18
19namespace compiler {
20
21class DirectCallRelocation : public AssemblerFixup {
22 public:
23 void Process(const MemoryRegion& region, intptr_t position) {
24 // Direct calls are relative to the following instruction on x86.
25 int32_t pointer = region.Load<int32_t>(position);
26 int32_t delta = region.start() + position + sizeof(int32_t);
27 region.Store<int32_t>(position, pointer - delta);
28 }
29
30 virtual bool IsPointerOffset() const { return false; }
31};
32
33int32_t Assembler::jit_cookie() {
34 if (jit_cookie_ == 0) {
35 jit_cookie_ = CreateJitCookie();
36 }
37 return jit_cookie_;
38}
39
40void Assembler::call(Register reg) {
41 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
42 EmitUint8(0xFF);
43 EmitRegisterOperand(2, reg);
44}
45
46void Assembler::call(const Address& address) {
47 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
48 EmitUint8(0xFF);
49 EmitOperand(2, address);
50}
51
52void Assembler::call(Label* label) {
53 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
54 EmitUint8(0xE8);
55 const int kSize = 5;
56 EmitLabel(label, kSize);
57}
58
59void Assembler::call(const ExternalLabel* label) {
60 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
61 intptr_t call_start = buffer_.GetPosition();
62 EmitUint8(0xE8);
63 EmitFixup(new DirectCallRelocation());
64 EmitInt32(label->address());
66}
67
68void Assembler::pushl(Register reg) {
69 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
70 EmitUint8(0x50 + reg);
71}
72
73void Assembler::pushl(const Address& address) {
74 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
75 EmitUint8(0xFF);
76 EmitOperand(6, address);
77}
78
79void Assembler::pushl(const Immediate& imm) {
80 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
81 if (imm.is_int8()) {
82 EmitUint8(0x6A);
83 EmitUint8(imm.value() & 0xFF);
84 } else {
85 EmitUint8(0x68);
86 EmitImmediate(imm);
87 }
88}
89
90void Assembler::popl(Register reg) {
91 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
92 EmitUint8(0x58 + reg);
93}
94
95void Assembler::popl(const Address& address) {
96 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
97 EmitUint8(0x8F);
98 EmitOperand(0, address);
99}
100
101void Assembler::pushal() {
102 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
103 EmitUint8(0x60);
104}
105
106void Assembler::popal() {
107 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
108 EmitUint8(0x61);
109}
110
112 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
113 EmitUint8(0x0F);
114 EmitUint8(0x90 + condition);
115 EmitUint8(0xC0 + dst);
116}
117
118void Assembler::movl(Register dst, const Immediate& imm) {
119 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
120 EmitUint8(0xB8 + dst);
121 EmitImmediate(imm);
122}
123
125 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
126 EmitUint8(0x89);
127 EmitRegisterOperand(src, dst);
128}
129
130void Assembler::movl(Register dst, const Address& src) {
131 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
132 EmitUint8(0x8B);
133 EmitOperand(dst, src);
134}
135
136void Assembler::movl(const Address& dst, Register src) {
137 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
138 EmitUint8(0x89);
139 EmitOperand(src, dst);
140}
141
142void Assembler::movl(const Address& dst, const Immediate& imm) {
143 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
144 EmitUint8(0xC7);
145 EmitOperand(0, dst);
146 EmitImmediate(imm);
147}
148
150 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
151 EmitUint8(0x0F);
152 EmitUint8(0xB6);
153 EmitRegisterOperand(dst, src);
154}
155
156void Assembler::movzxb(Register dst, const Address& src) {
157 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
158 EmitUint8(0x0F);
159 EmitUint8(0xB6);
160 EmitOperand(dst, src);
161}
162
164 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
165 EmitUint8(0x0F);
166 EmitUint8(0xBE);
167 EmitRegisterOperand(dst, src);
168}
169
170void Assembler::movsxb(Register dst, const Address& src) {
171 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
172 EmitUint8(0x0F);
173 EmitUint8(0xBE);
174 EmitOperand(dst, src);
175}
176
177void Assembler::movb(Register dst, const Address& src) {
178 // This would leave 24 bits above the 1 byte value undefined.
179 // If we ever want to purposefully have those undefined, remove this.
180 // TODO(dartbug.com/40210): Allow this.
181 FATAL("Use movzxb or movsxb instead.");
182}
183
184void Assembler::movb(const Address& dst, Register src) {
185 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
186 EmitUint8(0x88);
187 EmitOperand(src, dst);
188}
189
190void Assembler::movb(const Address& dst, ByteRegister src) {
191 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
192 EmitUint8(0x88);
193 EmitOperand(src, dst);
194}
195
196void Assembler::movb(const Address& dst, const Immediate& imm) {
197 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
198 EmitUint8(0xC6);
199 EmitOperand(EAX, dst);
200 ASSERT(imm.is_int8());
201 EmitUint8(imm.value() & 0xFF);
202}
203
205 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
206 EmitUint8(0x0F);
207 EmitUint8(0xB7);
208 EmitRegisterOperand(dst, src);
209}
210
211void Assembler::movzxw(Register dst, const Address& src) {
212 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
213 EmitUint8(0x0F);
214 EmitUint8(0xB7);
215 EmitOperand(dst, src);
216}
217
219 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
220 EmitUint8(0x0F);
221 EmitUint8(0xBF);
222 EmitRegisterOperand(dst, src);
223}
224
225void Assembler::movsxw(Register dst, const Address& src) {
226 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
227 EmitUint8(0x0F);
228 EmitUint8(0xBF);
229 EmitOperand(dst, src);
230}
231
232void Assembler::movw(Register dst, const Address& src) {
233 // This would leave 16 bits above the 2 byte value undefined.
234 // If we ever want to purposefully have those undefined, remove this.
235 // TODO(dartbug.com/40210): Allow this.
236 FATAL("Use movzxw or movsxw instead.");
237}
238
239void Assembler::movw(const Address& dst, Register src) {
240 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
241 EmitOperandSizeOverride();
242 EmitUint8(0x89);
243 EmitOperand(src, dst);
244}
245
246void Assembler::movw(const Address& dst, const Immediate& imm) {
247 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
248 EmitOperandSizeOverride();
249 EmitUint8(0xC7);
250 EmitOperand(0, dst);
251 EmitUint8(imm.value() & 0xFF);
252 EmitUint8((imm.value() >> 8) & 0xFF);
253}
254
255void Assembler::leal(Register dst, const Address& src) {
256 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
257 EmitUint8(0x8D);
258 EmitOperand(dst, src);
259}
260
261// Move if not overflow.
263 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
264 EmitUint8(0x0F);
265 EmitUint8(0x41);
266 EmitRegisterOperand(dst, src);
267}
268
270 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
271 EmitUint8(0x0F);
272 EmitUint8(0x44);
273 EmitRegisterOperand(dst, src);
274}
275
277 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
278 EmitUint8(0x0F);
279 EmitUint8(0x45);
280 EmitRegisterOperand(dst, src);
281}
282
284 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
285 EmitUint8(0x0F);
286 EmitUint8(0x48);
287 EmitRegisterOperand(dst, src);
288}
289
291 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
292 EmitUint8(0x0F);
293 EmitUint8(0x49);
294 EmitRegisterOperand(dst, src);
295}
296
298 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
299 EmitUint8(0x0F);
300 EmitUint8(0x4D);
301 EmitRegisterOperand(dst, src);
302}
303
305 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
306 EmitUint8(0x0F);
307 EmitUint8(0x4C);
308 EmitRegisterOperand(dst, src);
309}
310
312 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
313 EmitUint8(0xF3);
314 EmitUint8(0xA4);
315}
316
318 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
319 EmitUint8(0xF3);
320 EmitUint8(0x66);
321 EmitUint8(0xA5);
322}
323
325 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
326 EmitUint8(0xF3);
327 EmitUint8(0xA5);
328}
329
330void Assembler::movss(XmmRegister dst, const Address& src) {
331 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
332 EmitUint8(0xF3);
333 EmitUint8(0x0F);
334 EmitUint8(0x10);
335 EmitOperand(dst, src);
336}
337
338void Assembler::movss(const Address& dst, XmmRegister src) {
339 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
340 EmitUint8(0xF3);
341 EmitUint8(0x0F);
342 EmitUint8(0x11);
343 EmitOperand(src, dst);
344}
345
347 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
348 EmitUint8(0xF3);
349 EmitUint8(0x0F);
350 EmitUint8(0x11);
351 EmitXmmRegisterOperand(src, dst);
352}
353
355 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
356 EmitUint8(0x66);
357 EmitUint8(0x0F);
358 EmitUint8(0x6E);
359 EmitOperand(dst, Operand(src));
360}
361
363 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
364 EmitUint8(0x66);
365 EmitUint8(0x0F);
366 EmitUint8(0x7E);
367 EmitOperand(src, Operand(dst));
368}
369
370void Assembler::movq(const Address& dst, XmmRegister src) {
371 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
372 EmitUint8(0x66);
373 EmitUint8(0x0F);
374 EmitUint8(0xD6);
375 EmitOperand(src, Operand(dst));
376}
377
378void Assembler::movq(XmmRegister dst, const Address& src) {
379 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
380 EmitUint8(0xF3);
381 EmitUint8(0x0F);
382 EmitUint8(0x7E);
383 EmitOperand(dst, Operand(src));
384}
385
387 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
388 EmitUint8(0xF3);
389 EmitUint8(0x0F);
390 EmitUint8(0x58);
391 EmitXmmRegisterOperand(dst, src);
392}
393
394void Assembler::addss(XmmRegister dst, const Address& src) {
395 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
396 EmitUint8(0xF3);
397 EmitUint8(0x0F);
398 EmitUint8(0x58);
399 EmitOperand(dst, src);
400}
401
403 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
404 EmitUint8(0xF3);
405 EmitUint8(0x0F);
406 EmitUint8(0x5C);
407 EmitXmmRegisterOperand(dst, src);
408}
409
410void Assembler::subss(XmmRegister dst, const Address& src) {
411 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
412 EmitUint8(0xF3);
413 EmitUint8(0x0F);
414 EmitUint8(0x5C);
415 EmitOperand(dst, src);
416}
417
419 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
420 EmitUint8(0xF3);
421 EmitUint8(0x0F);
422 EmitUint8(0x59);
423 EmitXmmRegisterOperand(dst, src);
424}
425
426void Assembler::mulss(XmmRegister dst, const Address& src) {
427 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
428 EmitUint8(0xF3);
429 EmitUint8(0x0F);
430 EmitUint8(0x59);
431 EmitOperand(dst, src);
432}
433
435 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
436 EmitUint8(0xF3);
437 EmitUint8(0x0F);
438 EmitUint8(0x5E);
439 EmitXmmRegisterOperand(dst, src);
440}
441
442void Assembler::divss(XmmRegister dst, const Address& src) {
443 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
444 EmitUint8(0xF3);
445 EmitUint8(0x0F);
446 EmitUint8(0x5E);
447 EmitOperand(dst, src);
448}
449
450void Assembler::flds(const Address& src) {
451 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
452 EmitUint8(0xD9);
453 EmitOperand(0, src);
454}
455
456void Assembler::fstps(const Address& dst) {
457 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
458 EmitUint8(0xD9);
459 EmitOperand(3, dst);
460}
461
462void Assembler::movsd(XmmRegister dst, const Address& src) {
463 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
464 EmitUint8(0xF2);
465 EmitUint8(0x0F);
466 EmitUint8(0x10);
467 EmitOperand(dst, src);
468}
469
470void Assembler::movsd(const Address& dst, XmmRegister src) {
471 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
472 EmitUint8(0xF2);
473 EmitUint8(0x0F);
474 EmitUint8(0x11);
475 EmitOperand(src, dst);
476}
477
479 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
480 EmitUint8(0xF2);
481 EmitUint8(0x0F);
482 EmitUint8(0x11);
483 EmitXmmRegisterOperand(src, dst);
484}
485
487 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
488 EmitUint8(0x0F);
489 EmitUint8(0x28);
490 EmitXmmRegisterOperand(dst, src);
491}
492
493void Assembler::movups(XmmRegister dst, const Address& src) {
494 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
495 EmitUint8(0x0F);
496 EmitUint8(0x10);
497 EmitOperand(dst, src);
498}
499
500void Assembler::movups(const Address& dst, XmmRegister src) {
501 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
502 EmitUint8(0x0F);
503 EmitUint8(0x11);
504 EmitOperand(src, dst);
505}
506
508 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
509 EmitUint8(0xF2);
510 EmitUint8(0x0F);
511 EmitUint8(0x58);
512 EmitXmmRegisterOperand(dst, src);
513}
514
515void Assembler::addsd(XmmRegister dst, const Address& src) {
516 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
517 EmitUint8(0xF2);
518 EmitUint8(0x0F);
519 EmitUint8(0x58);
520 EmitOperand(dst, src);
521}
522
524 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
525 EmitUint8(0x66);
526 EmitUint8(0x0F);
527 EmitUint8(0xFE);
528 EmitXmmRegisterOperand(dst, src);
529}
530
532 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
533 EmitUint8(0x66);
534 EmitUint8(0x0F);
535 EmitUint8(0xFA);
536 EmitXmmRegisterOperand(dst, src);
537}
538
540 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
541 EmitUint8(0x0F);
542 EmitUint8(0x58);
543 EmitXmmRegisterOperand(dst, src);
544}
545
547 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
548 EmitUint8(0x0F);
549 EmitUint8(0x5C);
550 EmitXmmRegisterOperand(dst, src);
551}
552
554 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
555 EmitUint8(0x0F);
556 EmitUint8(0x5E);
557 EmitXmmRegisterOperand(dst, src);
558}
559
561 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
562 EmitUint8(0x0F);
563 EmitUint8(0x59);
564 EmitXmmRegisterOperand(dst, src);
565}
566
568 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
569 EmitUint8(0x0F);
570 EmitUint8(0x5D);
571 EmitXmmRegisterOperand(dst, src);
572}
573
575 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
576 EmitUint8(0x0F);
577 EmitUint8(0x5F);
578 EmitXmmRegisterOperand(dst, src);
579}
580
582 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
583 EmitUint8(0x0F);
584 EmitUint8(0x54);
585 EmitXmmRegisterOperand(dst, src);
586}
587
588void Assembler::andps(XmmRegister dst, const Address& src) {
589 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
590 EmitUint8(0x0F);
591 EmitUint8(0x54);
592 EmitOperand(dst, src);
593}
594
596 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
597 EmitUint8(0x0F);
598 EmitUint8(0x56);
599 EmitXmmRegisterOperand(dst, src);
600}
601
603 static const struct ALIGN16 {
604 uint32_t a;
605 uint32_t b;
606 uint32_t c;
607 uint32_t d;
608 } float_not_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF};
609 xorps(dst, Address::Absolute(reinterpret_cast<uword>(&float_not_constant)));
610}
611
613 static const struct ALIGN16 {
614 uint32_t a;
615 uint32_t b;
616 uint32_t c;
617 uint32_t d;
618 } float_negate_constant = {0x80000000, 0x80000000, 0x80000000, 0x80000000};
619 xorps(dst,
620 Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
621}
622
624 static const struct ALIGN16 {
625 uint32_t a;
626 uint32_t b;
627 uint32_t c;
628 uint32_t d;
629 } float_absolute_constant = {0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF};
630 andps(dst,
631 Address::Absolute(reinterpret_cast<uword>(&float_absolute_constant)));
632}
633
635 static const struct ALIGN16 {
636 uint32_t a;
637 uint32_t b;
638 uint32_t c;
639 uint32_t d;
640 } float_zerow_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000};
642}
643
645 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
646 EmitUint8(0x0F);
647 EmitUint8(0xC2);
648 EmitXmmRegisterOperand(dst, src);
649 EmitUint8(0x0);
650}
651
653 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
654 EmitUint8(0x0F);
655 EmitUint8(0xC2);
656 EmitXmmRegisterOperand(dst, src);
657 EmitUint8(0x4);
658}
659
661 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
662 EmitUint8(0x0F);
663 EmitUint8(0xC2);
664 EmitXmmRegisterOperand(dst, src);
665 EmitUint8(0x1);
666}
667
669 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
670 EmitUint8(0x0F);
671 EmitUint8(0xC2);
672 EmitXmmRegisterOperand(dst, src);
673 EmitUint8(0x2);
674}
675
677 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
678 EmitUint8(0x0F);
679 EmitUint8(0xC2);
680 EmitXmmRegisterOperand(dst, src);
681 EmitUint8(0x5);
682}
683
685 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
686 EmitUint8(0x0F);
687 EmitUint8(0xC2);
688 EmitXmmRegisterOperand(dst, src);
689 EmitUint8(0x6);
690}
691
693 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
694 EmitUint8(0x0F);
695 EmitUint8(0x51);
696 EmitXmmRegisterOperand(dst, dst);
697}
698
700 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
701 EmitUint8(0x0F);
702 EmitUint8(0x52);
703 EmitXmmRegisterOperand(dst, dst);
704}
705
707 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
708 EmitUint8(0x0F);
709 EmitUint8(0x53);
710 EmitXmmRegisterOperand(dst, dst);
711}
712
714 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
715 EmitUint8(0x0F);
716 EmitUint8(0x12);
717 EmitXmmRegisterOperand(dst, src);
718}
719
721 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
722 EmitUint8(0x0F);
723 EmitUint8(0x16);
724 EmitXmmRegisterOperand(dst, src);
725}
726
728 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
729 EmitUint8(0x0F);
730 EmitUint8(0x14);
731 EmitXmmRegisterOperand(dst, src);
732}
733
735 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
736 EmitUint8(0x0F);
737 EmitUint8(0x15);
738 EmitXmmRegisterOperand(dst, src);
739}
740
742 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
743 EmitUint8(0x66);
744 EmitUint8(0x0F);
745 EmitUint8(0x14);
746 EmitXmmRegisterOperand(dst, src);
747}
748
750 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
751 EmitUint8(0x66);
752 EmitUint8(0x0F);
753 EmitUint8(0x15);
754 EmitXmmRegisterOperand(dst, src);
755}
756
757void Assembler::set1ps(XmmRegister dst, Register tmp1, const Immediate& imm) {
758 // Load 32-bit immediate value into tmp1.
759 movl(tmp1, imm);
760 // Move value from tmp1 into dst.
761 movd(dst, tmp1);
762 // Broadcast low lane into other three lanes.
763 shufps(dst, dst, Immediate(0x0));
764}
765
766void Assembler::shufps(XmmRegister dst, XmmRegister src, const Immediate& imm) {
767 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
768 EmitUint8(0x0F);
769 EmitUint8(0xC6);
770 EmitXmmRegisterOperand(dst, src);
771 ASSERT(imm.is_uint8());
772 EmitUint8(imm.value());
773}
774
776 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
777 EmitUint8(0x66);
778 EmitUint8(0x0F);
779 EmitUint8(0x58);
780 EmitXmmRegisterOperand(dst, src);
781}
782
784 static const struct ALIGN16 {
785 uint64_t a;
786 uint64_t b;
787 } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
788 xorpd(dst,
789 Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
790}
791
793 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
794 EmitUint8(0x66);
795 EmitUint8(0x0F);
796 EmitUint8(0x5C);
797 EmitXmmRegisterOperand(dst, src);
798}
799
801 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
802 EmitUint8(0x66);
803 EmitUint8(0x0F);
804 EmitUint8(0x59);
805 EmitXmmRegisterOperand(dst, src);
806}
807
809 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
810 EmitUint8(0x66);
811 EmitUint8(0x0F);
812 EmitUint8(0x5E);
813 EmitXmmRegisterOperand(dst, src);
814}
815
817 static const struct ALIGN16 {
818 uint64_t a;
819 uint64_t b;
820 } double_absolute_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
821 andpd(dst,
822 Address::Absolute(reinterpret_cast<uword>(&double_absolute_constant)));
823}
824
826 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
827 EmitUint8(0x66);
828 EmitUint8(0x0F);
829 EmitUint8(0x5D);
830 EmitXmmRegisterOperand(dst, src);
831}
832
834 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
835 EmitUint8(0x66);
836 EmitUint8(0x0F);
837 EmitUint8(0x5F);
838 EmitXmmRegisterOperand(dst, src);
839}
840
842 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
843 EmitUint8(0x66);
844 EmitUint8(0x0F);
845 EmitUint8(0x51);
846 EmitXmmRegisterOperand(dst, dst);
847}
848
850 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
851 EmitUint8(0x0F);
852 EmitUint8(0x5A);
853 EmitXmmRegisterOperand(dst, src);
854}
855
857 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
858 EmitUint8(0x66);
859 EmitUint8(0x0F);
860 EmitUint8(0x5A);
861 EmitXmmRegisterOperand(dst, src);
862}
863
864void Assembler::shufpd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
865 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
866 EmitUint8(0x66);
867 EmitUint8(0x0F);
868 EmitUint8(0xC6);
869 EmitXmmRegisterOperand(dst, src);
870 ASSERT(imm.is_uint8());
871 EmitUint8(imm.value());
872}
873
875 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
876 EmitUint8(0xF2);
877 EmitUint8(0x0F);
878 EmitUint8(0x5C);
879 EmitXmmRegisterOperand(dst, src);
880}
881
882void Assembler::subsd(XmmRegister dst, const Address& src) {
883 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
884 EmitUint8(0xF2);
885 EmitUint8(0x0F);
886 EmitUint8(0x5C);
887 EmitOperand(dst, src);
888}
889
891 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
892 EmitUint8(0xF2);
893 EmitUint8(0x0F);
894 EmitUint8(0x59);
895 EmitXmmRegisterOperand(dst, src);
896}
897
898void Assembler::mulsd(XmmRegister dst, const Address& src) {
899 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
900 EmitUint8(0xF2);
901 EmitUint8(0x0F);
902 EmitUint8(0x59);
903 EmitOperand(dst, src);
904}
905
907 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
908 EmitUint8(0xF2);
909 EmitUint8(0x0F);
910 EmitUint8(0x5E);
911 EmitXmmRegisterOperand(dst, src);
912}
913
914void Assembler::divsd(XmmRegister dst, const Address& src) {
915 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
916 EmitUint8(0xF2);
917 EmitUint8(0x0F);
918 EmitUint8(0x5E);
919 EmitOperand(dst, src);
920}
921
923 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
924 EmitUint8(0xF3);
925 EmitUint8(0x0F);
926 EmitUint8(0x2A);
927 EmitOperand(dst, Operand(src));
928}
929
931 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
932 EmitUint8(0xF2);
933 EmitUint8(0x0F);
934 EmitUint8(0x2A);
935 EmitOperand(dst, Operand(src));
936}
937
939 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
940 EmitUint8(0xF3);
941 EmitUint8(0x0F);
942 EmitUint8(0x2D);
943 EmitXmmRegisterOperand(dst, src);
944}
945
947 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
948 EmitUint8(0xF3);
949 EmitUint8(0x0F);
950 EmitUint8(0x5A);
951 EmitXmmRegisterOperand(dst, src);
952}
953
955 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
956 EmitUint8(0xF2);
957 EmitUint8(0x0F);
958 EmitUint8(0x2D);
959 EmitXmmRegisterOperand(dst, src);
960}
961
963 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
964 EmitUint8(0xF3);
965 EmitUint8(0x0F);
966 EmitUint8(0x2C);
967 EmitXmmRegisterOperand(dst, src);
968}
969
971 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
972 EmitUint8(0xF2);
973 EmitUint8(0x0F);
974 EmitUint8(0x2C);
975 EmitXmmRegisterOperand(dst, src);
976}
977
979 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
980 EmitUint8(0xF2);
981 EmitUint8(0x0F);
982 EmitUint8(0x5A);
983 EmitXmmRegisterOperand(dst, src);
984}
985
987 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
988 EmitUint8(0xF3);
989 EmitUint8(0x0F);
990 EmitUint8(0xE6);
991 EmitXmmRegisterOperand(dst, src);
992}
993
995 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
996 EmitUint8(0x0F);
997 EmitUint8(0x2F);
998 EmitXmmRegisterOperand(a, b);
999}
1000
1002 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1003 EmitUint8(0x66);
1004 EmitUint8(0x0F);
1005 EmitUint8(0x2F);
1006 EmitXmmRegisterOperand(a, b);
1007}
1008
1010 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1011 EmitUint8(0x66);
1012 EmitUint8(0x0F);
1013 EmitUint8(0x50);
1014 EmitXmmRegisterOperand(dst, src);
1015}
1016
1018 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1019 EmitUint8(0x0F);
1020 EmitUint8(0x50);
1021 EmitXmmRegisterOperand(dst, src);
1022}
1023
1025 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1026 EmitUint8(0x66);
1027 EmitUint8(0x0F);
1028 EmitUint8(0xD7);
1029 EmitXmmRegisterOperand(dst, src);
1030}
1031
1033 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1034 EmitUint8(0xF2);
1035 EmitUint8(0x0F);
1036 EmitUint8(0x51);
1037 EmitXmmRegisterOperand(dst, src);
1038}
1039
1041 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1042 EmitUint8(0xF3);
1043 EmitUint8(0x0F);
1044 EmitUint8(0x51);
1045 EmitXmmRegisterOperand(dst, src);
1046}
1047
1048void Assembler::xorpd(XmmRegister dst, const Address& src) {
1049 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1050 EmitUint8(0x66);
1051 EmitUint8(0x0F);
1052 EmitUint8(0x57);
1053 EmitOperand(dst, src);
1054}
1055
1057 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1058 EmitUint8(0x66);
1059 EmitUint8(0x0F);
1060 EmitUint8(0x57);
1061 EmitXmmRegisterOperand(dst, src);
1062}
1063
1065 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1066 EmitUint8(0x66);
1067 EmitUint8(0x0F);
1068 EmitUint8(0x56);
1069 EmitXmmRegisterOperand(dst, src);
1070}
1071
1072void Assembler::xorps(XmmRegister dst, const Address& src) {
1073 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1074 EmitUint8(0x0F);
1075 EmitUint8(0x57);
1076 EmitOperand(dst, src);
1077}
1078
1080 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1081 EmitUint8(0x0F);
1082 EmitUint8(0x57);
1083 EmitXmmRegisterOperand(dst, src);
1084}
1085
1086void Assembler::andpd(XmmRegister dst, const Address& src) {
1087 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1088 EmitUint8(0x66);
1089 EmitUint8(0x0F);
1090 EmitUint8(0x54);
1091 EmitOperand(dst, src);
1092}
1093
1095 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1096 EmitUint8(0x66);
1097 EmitUint8(0x0F);
1098 EmitUint8(0x54);
1099 EmitXmmRegisterOperand(dst, src);
1100}
1101
1102void Assembler::pextrd(Register dst, XmmRegister src, const Immediate& imm) {
1104 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1105 EmitUint8(0x66);
1106 EmitUint8(0x0F);
1107 EmitUint8(0x3A);
1108 EmitUint8(0x16);
1109 EmitOperand(src, Operand(dst));
1110 ASSERT(imm.is_uint8());
1111 EmitUint8(imm.value());
1112}
1113
1116 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1117 EmitUint8(0x66);
1118 EmitUint8(0x0F);
1119 EmitUint8(0x38);
1120 EmitUint8(0x25);
1121 EmitXmmRegisterOperand(dst, src);
1122}
1123
1126 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1127 EmitUint8(0x66);
1128 EmitUint8(0x0F);
1129 EmitUint8(0x38);
1130 EmitUint8(0x29);
1131 EmitXmmRegisterOperand(dst, src);
1132}
1133
1135 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1136 EmitUint8(0x66);
1137 EmitUint8(0x0F);
1138 EmitUint8(0xEF);
1139 EmitXmmRegisterOperand(dst, src);
1140}
1141
1144 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1145 EmitUint8(0x66);
1146 EmitUint8(0x0F);
1147 EmitUint8(0x3A);
1148 EmitUint8(0x0B);
1149 EmitXmmRegisterOperand(dst, src);
1150 // Mask precision exception.
1151 EmitUint8(static_cast<uint8_t>(mode) | 0x8);
1152}
1153
1154void Assembler::fldl(const Address& src) {
1155 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1156 EmitUint8(0xDD);
1157 EmitOperand(0, src);
1158}
1159
1160void Assembler::fstpl(const Address& dst) {
1161 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1162 EmitUint8(0xDD);
1163 EmitOperand(3, dst);
1164}
1165
1166void Assembler::fnstcw(const Address& dst) {
1167 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1168 EmitUint8(0xD9);
1169 EmitOperand(7, dst);
1170}
1171
1172void Assembler::fldcw(const Address& src) {
1173 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1174 EmitUint8(0xD9);
1175 EmitOperand(5, src);
1176}
1177
1178void Assembler::fistpl(const Address& dst) {
1179 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1180 EmitUint8(0xDF);
1181 EmitOperand(7, dst);
1182}
1183
1184void Assembler::fistps(const Address& dst) {
1185 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1186 EmitUint8(0xDB);
1187 EmitOperand(3, dst);
1188}
1189
1190void Assembler::fildl(const Address& src) {
1191 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1192 EmitUint8(0xDF);
1193 EmitOperand(5, src);
1194}
1195
1196void Assembler::filds(const Address& src) {
1197 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1198 EmitUint8(0xDB);
1199 EmitOperand(0, src);
1200}
1201
1202void Assembler::fincstp() {
1203 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1204 EmitUint8(0xD9);
1205 EmitUint8(0xF7);
1206}
1207
1208void Assembler::ffree(intptr_t value) {
1209 ASSERT(value < 7);
1210 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1211 EmitUint8(0xDD);
1212 EmitUint8(0xC0 + value);
1213}
1214
1215void Assembler::fsin() {
1216 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1217 EmitUint8(0xD9);
1218 EmitUint8(0xFE);
1219}
1220
1221void Assembler::fcos() {
1222 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1223 EmitUint8(0xD9);
1224 EmitUint8(0xFF);
1225}
1226
1227void Assembler::fsincos() {
1228 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1229 EmitUint8(0xD9);
1230 EmitUint8(0xFB);
1231}
1232
1233void Assembler::fptan() {
1234 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1235 EmitUint8(0xD9);
1236 EmitUint8(0xF2);
1237}
1238
1240 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1241 EmitUint8(0x87);
1242 EmitRegisterOperand(dst, src);
1243}
1244
1245void Assembler::cmpw(const Address& address, const Immediate& imm) {
1246 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1247 EmitOperandSizeOverride();
1248 EmitUint8(0x81);
1249 EmitOperand(7, address);
1250 EmitUint8(imm.value() & 0xFF);
1251 EmitUint8((imm.value() >> 8) & 0xFF);
1252}
1253
1254void Assembler::cmpb(const Address& address, const Immediate& imm) {
1255 ASSERT(imm.is_int8());
1256 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1257 EmitUint8(0x80);
1258 EmitOperand(7, address);
1259 EmitUint8(imm.value() & 0xFF);
1260}
1261
1262void Assembler::testl(Register reg1, Register reg2) {
1263 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1264 EmitUint8(0x85);
1265 EmitRegisterOperand(reg1, reg2);
1266}
1267
1268void Assembler::testl(Register reg, const Immediate& immediate) {
1269 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1270 // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
1271 // we only test the byte register to keep the encoding short.
1272 if (immediate.is_uint8() && reg < 4) {
1273 // Use zero-extended 8-bit immediate.
1274 if (reg == EAX) {
1275 EmitUint8(0xA8);
1276 } else {
1277 EmitUint8(0xF6);
1278 EmitUint8(0xC0 + reg);
1279 }
1280 EmitUint8(immediate.value() & 0xFF);
1281 } else if (reg == EAX) {
1282 // Use short form if the destination is EAX.
1283 EmitUint8(0xA9);
1284 EmitImmediate(immediate);
1285 } else {
1286 EmitUint8(0xF7);
1287 EmitOperand(0, Operand(reg));
1288 EmitImmediate(immediate);
1289 }
1290}
1291
1292void Assembler::testl(const Address& address, const Immediate& immediate) {
1293 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1294 EmitUint8(0xF7);
1295 EmitOperand(0, address);
1296 EmitImmediate(immediate);
1297}
1298
1299void Assembler::testl(const Address& address, Register reg) {
1300 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1301 EmitUint8(0x85);
1302 EmitOperand(reg, address);
1303}
1304
1305void Assembler::testb(const Address& address, const Immediate& imm) {
1306 ASSERT(imm.is_int8());
1307 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1308 EmitUint8(0xF6);
1309 EmitOperand(0, address);
1310 EmitUint8(imm.value() & 0xFF);
1311}
1312
// Emits `test byte [address], reg8` (encoding 0x84 /r).
1313void Assembler::testb(const Address& address, ByteRegister reg) {
1314 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1315 EmitUint8(0x84);
1316 EmitOperand(reg, address);
1317}
1318
// Emits a register-register ALU operation (add/or/and/sub/xor/cmp...) of the
// given `opcode`, with an operand-size prefix when `bytes` == 2 (16-bit form).
1319void Assembler::Alu(int bytes, uint8_t opcode, Register dst, Register src) {
1320 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1321 if (bytes == 2) {
1322 EmitOperandSizeOverride();  // 0x66 prefix selects the 16-bit form.
1323 }
1324 ASSERT((opcode & 7) == 3);  // Low bits 011: r/m is the source (reg, r/m).
1325 EmitUint8(opcode);
1326 EmitOperand(dst, Operand(src));
1327}
1328
// Emits a register-immediate ALU operation; `modrm_opcode` selects the
// operation within the 0x81/0x83 immediate group (e.g. /0 add, /5 sub).
1329void Assembler::Alu(uint8_t modrm_opcode, Register dst, const Immediate& imm) {
1330 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1331 EmitComplex(modrm_opcode, Operand(dst), imm);
1332}
1333
// Emits a memory-to-register ALU operation of the given `opcode`, with an
// operand-size prefix when `bytes` == 2 (16-bit form).
1334void Assembler::Alu(int bytes,
1335 uint8_t opcode,
1336 Register dst,
1337 const Address& src) {
1338 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1339 if (bytes == 2) {
1340 EmitOperandSizeOverride();
1341 }
1342 ASSERT((opcode & 7) == 3);  // Low bits 011: r/m is the source (reg, r/m).
1343 EmitUint8(opcode);
1344 EmitOperand(dst, src);
1345}
1346
// Emits a register-to-memory ALU operation of the given `opcode`, with an
// operand-size prefix when `bytes` == 2 (16-bit form).
1347void Assembler::Alu(int bytes,
1348 uint8_t opcode,
1349 const Address& dst,
1350 Register src) {
1351 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1352 if (bytes == 2) {
1353 EmitOperandSizeOverride();
1354 }
1355 ASSERT((opcode & 7) == 1);  // Low bits 001: r/m is the destination.
1356 EmitUint8(opcode);
1357 EmitOperand(src, dst);
1358}
1359
// Emits a memory-immediate ALU operation; `modrm_opcode` selects the
// operation within the immediate opcode group.
1360void Assembler::Alu(uint8_t modrm_opcode,
1361 const Address& dst,
1362 const Immediate& imm) {
1363 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1364 EmitComplex(modrm_opcode, dst, imm);
1365}
1366
// Emits `cdq` (0x99): sign-extends EAX into EDX:EAX, typically before idivl.
1367void Assembler::cdq() {
1368 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1369 EmitUint8(0x99);
1370}
1371
// Emits `idiv reg` (0xF7 /7): signed divide of EDX:EAX by reg.
1372void Assembler::idivl(Register reg) {
1373 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1374 EmitUint8(0xF7);
1375 EmitOperand(7, Operand(reg));
1376}
1377
// Emits `div reg` (0xF7 /6): unsigned divide of EDX:EAX by reg.
1378void Assembler::divl(Register reg) {
1379 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1380 EmitUint8(0xF7);
1381 EmitOperand(6, Operand(reg));
1382}
1383
1385 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1386 EmitUint8(0x0F);
1387 EmitUint8(0xAF);
1388 EmitOperand(dst, Operand(src));
1389}
1390
// Emits `imul reg, reg, imm32` (0x69 /r id): reg = reg * imm.
1391void Assembler::imull(Register reg, const Immediate& imm) {
1392 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1393 EmitUint8(0x69);
1394 EmitOperand(reg, Operand(reg));  // Source and destination are the same.
1395 EmitImmediate(imm);
1396}
1397
// Emits `imul reg, [address]` (0x0F 0xAF /r): reg = reg * [address].
1398void Assembler::imull(Register reg, const Address& address) {
1399 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1400 EmitUint8(0x0F);
1401 EmitUint8(0xAF);
1402 EmitOperand(reg, address);
1403}
1404
// Emits one-operand `imul reg` (0xF7 /5): EDX:EAX = EAX * reg (signed).
1405void Assembler::imull(Register reg) {
1406 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1407 EmitUint8(0xF7);
1408 EmitOperand(5, Operand(reg));
1409}
1410
// Emits one-operand `imul [address]` (0xF7 /5): EDX:EAX = EAX * [address].
1411void Assembler::imull(const Address& address) {
1412 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1413 EmitUint8(0xF7);
1414 EmitOperand(5, address);
1415}
1416
// Emits `mul reg` (0xF7 /4): EDX:EAX = EAX * reg (unsigned).
1417void Assembler::mull(Register reg) {
1418 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1419 EmitUint8(0xF7);
1420 EmitOperand(4, Operand(reg));
1421}
1422
// Emits `mul [address]` (0xF7 /4): EDX:EAX = EAX * [address] (unsigned).
1423void Assembler::mull(const Address& address) {
1424 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1425 EmitUint8(0xF7);
1426 EmitOperand(4, address);
1427}
1428
// Emits the one-byte `inc reg` form (0x40 + reg), IA-32 only.
1429void Assembler::incl(Register reg) {
1430 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1431 EmitUint8(0x40 + reg);
1432}
1433
// Emits `inc dword [address]` (0xFF /0).
1434void Assembler::incl(const Address& address) {
1435 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1436 EmitUint8(0xFF);
1437 EmitOperand(0, address);
1438}
1439
// Emits the one-byte `dec reg` form (0x48 + reg), IA-32 only.
1440void Assembler::decl(Register reg) {
1441 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1442 EmitUint8(0x48 + reg);
1443}
1444
// Emits `dec dword [address]` (0xFF /1).
1445void Assembler::decl(const Address& address) {
1446 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1447 EmitUint8(0xFF);
1448 EmitOperand(1, address);
1449}
1450
// Emits `shl reg, imm` (/4 in the shift group).
1451void Assembler::shll(Register reg, const Immediate& imm) {
1452 EmitGenericShift(4, reg, imm);
1453}
1454
// Emits `shl operand, cl` (/4); shift count comes from the shifter register.
1455void Assembler::shll(Register operand, Register shifter) {
1456 EmitGenericShift(4, Operand(operand), shifter);
1457}
1458
// Emits `shl dword [operand], cl` (/4).
1459void Assembler::shll(const Address& operand, Register shifter) {
1460 EmitGenericShift(4, Operand(operand), shifter);
1461}
1462
// Emits `shr reg, imm` (/5, logical right shift).
1463void Assembler::shrl(Register reg, const Immediate& imm) {
1464 EmitGenericShift(5, reg, imm);
1465}
1466
// Emits `shr operand, cl` (/5, logical right shift).
1467void Assembler::shrl(Register operand, Register shifter) {
1468 EmitGenericShift(5, Operand(operand), shifter);
1469}
1470
// Emits `sar reg, imm` (/7, arithmetic right shift).
1471void Assembler::sarl(Register reg, const Immediate& imm) {
1472 EmitGenericShift(7, reg, imm);
1473}
1474
// Emits `sar operand, cl` (/7, arithmetic right shift).
1475void Assembler::sarl(Register operand, Register shifter) {
1476 EmitGenericShift(7, Operand(operand), shifter);
1477}
1478
// Emits `sar dword [address], cl` (/7, arithmetic right shift).
1479void Assembler::sarl(const Address& address, Register shifter) {
1480 EmitGenericShift(7, Operand(address), shifter);
1481}
1482
1484 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1485 ASSERT(shifter == ECX);
1486 EmitUint8(0x0F);
1487 EmitUint8(0xA5);
1488 EmitRegisterOperand(src, dst);
1489}
1490
// Emits `shld dst, src, imm8` (0x0F 0xA4 /r ib): shifts dst left, filling
// vacated bits from the high end of src.
1491void Assembler::shldl(Register dst, Register src, const Immediate& imm) {
1492 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1493 ASSERT(imm.is_int8());
1494 EmitUint8(0x0F);
1495 EmitUint8(0xA4);
1496 EmitRegisterOperand(src, dst);  // SHLD encodes src in the reg field.
1497 EmitUint8(imm.value() & 0xFF);
1498}
1499
// Emits `shld dword [operand], src, cl` (0x0F 0xA5 /r); the shift count must
// already be in ECX.
1500void Assembler::shldl(const Address& operand, Register src, Register shifter) {
1501 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1502 ASSERT(shifter == ECX);
1503 EmitUint8(0x0F);
1504 EmitUint8(0xA5);
1505 EmitOperand(src, Operand(operand));
1506}
1507
1509 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1510 ASSERT(shifter == ECX);
1511 EmitUint8(0x0F);
1512 EmitUint8(0xAD);
1513 EmitRegisterOperand(src, dst);
1514}
1515
// Emits `shrd dst, src, imm8` (0x0F 0xAC /r ib): shifts dst right, filling
// vacated bits from the low end of src.
1516void Assembler::shrdl(Register dst, Register src, const Immediate& imm) {
1517 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1518 ASSERT(imm.is_int8());
1519 EmitUint8(0x0F);
1520 EmitUint8(0xAC);
1521 EmitRegisterOperand(src, dst);  // SHRD encodes src in the reg field.
1522 EmitUint8(imm.value() & 0xFF);
1523}
1524
// Emits `shrd dword [dst], src, cl` (0x0F 0xAD /r); the shift count must
// already be in ECX.
1525void Assembler::shrdl(const Address& dst, Register src, Register shifter) {
1526 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1527 ASSERT(shifter == ECX);
1528 EmitUint8(0x0F);
1529 EmitUint8(0xAD);
1530 EmitOperand(src, Operand(dst));
1531}
1532
// Emits `neg reg` (0xF7 /3): two's-complement negation.
1533void Assembler::negl(Register reg) {
1534 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1535 EmitUint8(0xF7);
1536 EmitOperand(3, Operand(reg));
1537}
1538
// Emits `not reg` (0xF7 /2, register-direct): one's-complement negation.
1539void Assembler::notl(Register reg) {
1540 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1541 EmitUint8(0xF7);
1542 EmitUint8(0xD0 | reg);  // ModRM 11 010 reg == register-direct /2.
1543}
1544
1546 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1547 EmitUint8(0x0F);
1548 EmitUint8(0xBC);
1549 EmitRegisterOperand(dst, src);
1550}
1551
1553 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1554 EmitUint8(0x0F);
1555 EmitUint8(0xBD);
1556 EmitRegisterOperand(dst, src);
1557}
1558
1561 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1562 EmitUint8(0xF3);
1563 EmitUint8(0x0F);
1564 EmitUint8(0xB8);
1565 EmitRegisterOperand(dst, src);
1566}
1567
1570 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1571 EmitUint8(0xF3);
1572 EmitUint8(0x0F);
1573 EmitUint8(0xBD);
1574 EmitRegisterOperand(dst, src);
1575}
1576
1578 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1579 EmitUint8(0x0F);
1580 EmitUint8(0xA3);
1581 EmitRegisterOperand(offset, base);
1582}
1583
// Emits `bt reg, imm8` (0x0F 0xBA /4 ib): copies the given bit of reg into CF.
1584void Assembler::bt(Register base, int bit) {
1585 ASSERT(bit >= 0 && bit < 32);
1586 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1587 EmitUint8(0x0F);
1588 EmitUint8(0xBA);
1589 EmitRegisterOperand(4, base);
1590 EmitUint8(bit);
1591}
1592
// Emits `enter imm16, 0` (0xC8 iw ib): creates a stack frame with `imm`
// bytes of local space and nesting level 0.
1593void Assembler::enter(const Immediate& imm) {
1594 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1595 EmitUint8(0xC8);
1596 ASSERT(imm.is_uint16());
1597 EmitUint8(imm.value() & 0xFF);
1598 EmitUint8((imm.value() >> 8) & 0xFF);
1599 EmitUint8(0x00);  // Nesting level 0.
1600}
1601
// Emits `leave` (0xC9): ESP = EBP, then pops EBP.
1602void Assembler::leave() {
1603 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1604 EmitUint8(0xC9);
1605}
1606
// Emits a near return `ret` (0xC3).
1607void Assembler::ret() {
1608 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1609 EmitUint8(0xC3);
1610}
1611
// Emits `ret imm16` (0xC2 iw): returns and pops `imm` extra bytes of
// arguments off the stack.
1612void Assembler::ret(const Immediate& imm) {
1613 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1614 EmitUint8(0xC2);
1615 ASSERT(imm.is_uint16());
1616 EmitUint8(imm.value() & 0xFF);
1617 EmitUint8((imm.value() >> 8) & 0xFF);
1618}
1619
1620void Assembler::nop(int size) {
1621 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1622 // There are nops up to size 15, but for now just provide up to size 8.
1623 ASSERT(0 < size && size <= MAX_NOP_SIZE);
1624 switch (size) {
1625 case 1:
1626 EmitUint8(0x90);
1627 break;
1628 case 2:
1629 EmitUint8(0x66);
1630 EmitUint8(0x90);
1631 break;
1632 case 3:
1633 EmitUint8(0x0F);
1634 EmitUint8(0x1F);
1635 EmitUint8(0x00);
1636 break;
1637 case 4:
1638 EmitUint8(0x0F);
1639 EmitUint8(0x1F);
1640 EmitUint8(0x40);
1641 EmitUint8(0x00);
1642 break;
1643 case 5:
1644 EmitUint8(0x0F);
1645 EmitUint8(0x1F);
1646 EmitUint8(0x44);
1647 EmitUint8(0x00);
1648 EmitUint8(0x00);
1649 break;
1650 case 6:
1651 EmitUint8(0x66);
1652 EmitUint8(0x0F);
1653 EmitUint8(0x1F);
1654 EmitUint8(0x44);
1655 EmitUint8(0x00);
1656 EmitUint8(0x00);
1657 break;
1658 case 7:
1659 EmitUint8(0x0F);
1660 EmitUint8(0x1F);
1661 EmitUint8(0x80);
1662 EmitUint8(0x00);
1663 EmitUint8(0x00);
1664 EmitUint8(0x00);
1665 EmitUint8(0x00);
1666 break;
1667 case 8:
1668 EmitUint8(0x0F);
1669 EmitUint8(0x1F);
1670 EmitUint8(0x84);
1671 EmitUint8(0x00);
1672 EmitUint8(0x00);
1673 EmitUint8(0x00);
1674 EmitUint8(0x00);
1675 EmitUint8(0x00);
1676 break;
1677 default:
1678 UNIMPLEMENTED();
1679 }
1680}
1681
// Emits `int3` (0xCC): a one-byte breakpoint trap.
1682void Assembler::int3() {
1683 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1684 EmitUint8(0xCC);
1685}
1686
// Emits `hlt` (0xF4): halts the processor (privileged; faults in user mode).
1687void Assembler::hlt() {
1688 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1689 EmitUint8(0xF4);
1690}
1691
// Emits a conditional jump to `label`. For bound labels (backward jumps) the
// short rel8 form (0x70+cc) is used when the displacement fits in 8 bits,
// otherwise the long rel32 form (0x0F 0x80+cc). For unbound labels, the
// requested form is emitted and the label's link chain records the site for
// later backpatching.
1692void Assembler::j(Condition condition, Label* label, JumpDistance distance) {
1693 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1694 if (label->IsBound()) {
1695 const int kShortSize = 2;
1696 const int kLongSize = 6;
1697 intptr_t offset = label->Position() - buffer_.Size();
1698 ASSERT(offset <= 0);  // Bound labels imply a backward jump.
1699 if (Utils::IsInt(8, offset - kShortSize)) {
1700 EmitUint8(0x70 + condition);
1701 EmitUint8((offset - kShortSize) & 0xFF);
1702 } else {
1703 EmitUint8(0x0F);
1704 EmitUint8(0x80 + condition);
1705 EmitInt32(offset - kLongSize);
1706 }
1707 } else if (distance == kNearJump) {
1708 EmitUint8(0x70 + condition);
1709 EmitNearLabelLink(label);
1710 } else {
1711 EmitUint8(0x0F);
1712 EmitUint8(0x80 + condition);
1713 EmitLabelLink(label);
1714 }
1715}
1716
// Emits a long conditional jump to an absolute external address; the fixup
// converts the absolute target into a pc-relative rel32 at finalization.
1717void Assembler::j(Condition condition, const ExternalLabel* label) {
1718 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1719 EmitUint8(0x0F);
1720 EmitUint8(0x80 + condition);
1721 EmitFixup(new DirectCallRelocation());
1722 EmitInt32(label->address());
1723}
1724
// Emits an indirect jump through a register: `jmp reg` (0xFF /4).
1725void Assembler::jmp(Register reg) {
1726 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1727 EmitUint8(0xFF);
1728 EmitRegisterOperand(4, reg);
1729}
1730
// Emits an indirect jump through memory: `jmp [address]` (0xFF /4).
1731void Assembler::jmp(const Address& address) {
1732 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1733 EmitUint8(0xFF);
1734 EmitOperand(4, address);
1735}
1736
// Emits an unconditional jump to `label`. Mirrors j(): bound labels get the
// shortest encoding that fits (0xEB rel8 or 0xE9 rel32); unbound labels are
// linked for later backpatching.
1737void Assembler::jmp(Label* label, JumpDistance distance) {
1738 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1739 if (label->IsBound()) {
1740 const int kShortSize = 2;
1741 const int kLongSize = 5;
1742 intptr_t offset = label->Position() - buffer_.Size();
1743 ASSERT(offset <= 0);  // Bound labels imply a backward jump.
1744 if (Utils::IsInt(8, offset - kShortSize)) {
1745 EmitUint8(0xEB);
1746 EmitUint8((offset - kShortSize) & 0xFF);
1747 } else {
1748 EmitUint8(0xE9);
1749 EmitInt32(offset - kLongSize);
1750 }
1751 } else if (distance == kNearJump) {
1752 EmitUint8(0xEB);
1753 EmitNearLabelLink(label);
1754 } else {
1755 EmitUint8(0xE9);
1756 EmitLabelLink(label);
1757 }
1758}
1759
// Emits `jmp rel32` (0xE9) to an absolute external address; the fixup makes
// the operand pc-relative at finalization.
1760void Assembler::jmp(const ExternalLabel* label) {
1761 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1762 EmitUint8(0xE9);
1763 EmitFixup(new DirectCallRelocation());
1764 EmitInt32(label->address());
1765}
1766
// Emits the `lock` prefix (0xF0); applies to the next emitted instruction.
1767void Assembler::lock() {
1768 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1769 EmitUint8(0xF0);
1770}
1771
// Emits `cmpxchg [address], reg` (0x0F 0xB1 /r); compare-and-swap with EAX
// as the implicit comparand. Pair with lock() for atomicity.
1772void Assembler::cmpxchgl(const Address& address, Register reg) {
1773 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1774 EmitUint8(0x0F);
1775 EmitUint8(0xB1);
1776 EmitOperand(reg, address);
1777}
1778
// Emits `cld` (0xFC): clears the direction flag (string ops ascend).
1779void Assembler::cld() {
1780 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1781 EmitUint8(0xFC);
1782}
1783
// Emits `std` (0xFD): sets the direction flag (string ops descend).
1784void Assembler::std() {
1785 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1786 EmitUint8(0xFD);
1787}
1788
// Emits `cpuid` (0x0F 0xA2); clobbers EAX, EBX, ECX and EDX.
1789void Assembler::cpuid() {
1790 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1791 EmitUint8(0x0F);
1792 EmitUint8(0xA2);
1793}
1794
1796 cmpl(a, b);
1797}
1798
1799void Assembler::Load(Register reg, const Address& address, OperandSize type) {
1800 switch (type) {
1801 case kByte:
1802 return movsxb(reg, address);
1803 case kUnsignedByte:
1804 return movzxb(reg, address);
1805 case kTwoBytes:
1806 return movsxw(reg, address);
1807 case kUnsignedTwoBytes:
1808 return movzxw(reg, address);
1809 case kUnsignedFourBytes:
1810 case kFourBytes:
1811 return movl(reg, address);
1812 default:
1813 UNREACHABLE();
1814 break;
1815 }
1816}
1817
1818void Assembler::Store(Register reg, const Address& address, OperandSize sz) {
1819 switch (sz) {
1820 case kByte:
1821 case kUnsignedByte:
1822 return movb(address, reg);
1823 case kTwoBytes:
1824 case kUnsignedTwoBytes:
1825 return movw(address, reg);
1826 case kFourBytes:
1827 case kUnsignedFourBytes:
1828 return movl(address, reg);
1829 default:
1830 UNREACHABLE();
1831 break;
1832 }
1833}
1834
// Stores an object reference to `dst`. Objects that can be embedded as raw
// pointers are stored as a plain immediate; otherwise a `mov [dst], imm32`
// (0xC7 /0) is emitted with the object recorded in the buffer's object pool
// so the GC can find and relocate the embedded reference.
1835void Assembler::Store(const Object& object, const Address& dst) {
1836 if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
1837 movl(dst, Immediate(target::ToRawPointer(object)));
1838 } else {
1839 DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
1840 ASSERT(IsInOldSpace(object));
1841 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1842 EmitUint8(0xC7);
1843 EmitOperand(0, dst);
1844 buffer_.EmitObject(object);
1845 }
1846}
1847
// Architecture-neutral alias for an arithmetic right shift by a constant.
1848void Assembler::ArithmeticShiftRightImmediate(Register reg, intptr_t shift) {
1849 sarl(reg, Immediate(shift));
1850}
1851
1853 Register reg2,
1854 intptr_t offset,
1856 Register temp,
1857 Label* equals) {
1858 Label loop;
1859 Bind(&loop);
1860 decl(count);
1863 movl(temp, FieldAddress(reg1, count, TIMES_4, offset));
1864 cmpl(temp, FieldAddress(reg2, count, TIMES_4, offset));
1866}
1867
// Loads the stack slot `depth` words above ESP into `dst`.
1868void Assembler::LoadFromStack(Register dst, intptr_t depth) {
1869 ASSERT(depth >= 0);
1870 movl(dst, Address(ESP, depth * target::kWordSize));
1871}
1872
// Stores `src` into the stack slot `depth` words above ESP.
1873void Assembler::StoreToStack(Register src, intptr_t depth) {
1874 ASSERT(depth >= 0);
1875 movl(Address(ESP, depth * target::kWordSize), src);
1876}
1877
// Compares `src` against the stack slot `depth` words above ESP (sets flags).
1878void Assembler::CompareToStack(Register src, intptr_t depth) {
1879 cmpl(src, Address(ESP, depth * target::kWordSize));
1880}
1881
1883 switch (sz) {
1884 case kUnsignedFourBytes:
1885 case kFourBytes:
1886 if (to == from) return; // No operation needed.
1887 return movl(to, from);
1888 case kUnsignedTwoBytes:
1889 return movzxw(to, from);
1890 case kTwoBytes:
1891 return movsxw(to, from);
1892 case kUnsignedByte:
1893 switch (from) {
1894 case EAX:
1895 case EBX:
1896 case ECX:
1897 case EDX:
1898 return movzxb(to, ByteRegisterOf(from));
1899 break;
1900 default:
1901 if (to != from) {
1902 movl(to, from);
1903 }
1904 return andl(to, Immediate(0xFF));
1905 }
1906 case kByte:
1907 switch (from) {
1908 case EAX:
1909 case EBX:
1910 case ECX:
1911 case EDX:
1912 return movsxb(to, ByteRegisterOf(from));
1913 break;
1914 default:
1915 if (to != from) {
1916 movl(to, from);
1917 }
1918 shll(to, Immediate(24));
1919 return sarl(to, Immediate(24));
1920 }
1921 default:
1922 UNIMPLEMENTED();
1923 break;
1924 }
1925}
1926
1928 pushl(r);
1929}
1930
1932 popl(r);
1933}
1934
// Pushes the given registers onto the stack in list order (so the last
// register in the list ends up at the lowest address).
1935void Assembler::PushRegistersInOrder(std::initializer_list<Register> regs) {
1936 for (Register reg : regs) {
1937 PushRegister(reg);
1938 }
1939}
1940
1941void Assembler::AddImmediate(Register reg, const Immediate& imm) {
1942 const intptr_t value = imm.value();
1943 if (value == 0) {
1944 return;
1945 }
1946 if ((value > 0) || (value == kMinInt32)) {
1947 if (value == 1) {
1948 incl(reg);
1949 } else {
1950 addl(reg, imm);
1951 }
1952 } else {
1953 SubImmediate(reg, Immediate(-value));
1954 }
1955}
1956
1958 if (dest == src) {
1960 return;
1961 }
1962 if (value == 0) {
1964 return;
1965 }
1966 leal(dest, Address(src, value));
1967}
1968
1969void Assembler::SubImmediate(Register reg, const Immediate& imm) {
1970 const intptr_t value = imm.value();
1971 if (value == 0) {
1972 return;
1973 }
1974 if ((value > 0) || (value == kMinInt32)) {
1975 if (value == 1) {
1976 decl(reg);
1977 } else {
1978 subl(reg, imm);
1979 }
1980 } else {
1981 AddImmediate(reg, Immediate(-value));
1982 }
1983}
1984
1986 ASSERT(src1 != src2); // Likely a mistake.
1987 if (src2 == kNoRegister) {
1988 src2 = dst;
1989 }
1990 if (dst == src2) {
1991 andl(dst, src1);
1992 } else if (dst == src1) {
1993 andl(dst, src2);
1994 } else {
1995 movl(dst, src1);
1996 andl(dst, src2);
1997 }
1998}
1999
// Discards `stack_elements` words from the top of the stack by bumping ESP.
2000void Assembler::Drop(intptr_t stack_elements) {
2001 ASSERT(stack_elements >= 0);
2002 if (stack_elements > 0) {
2003 addl(ESP, Immediate(stack_elements * target::kWordSize));
2004 }
2005}
2006
2009}
2010
2013}
2014
2016 const Object& object,
2017 bool movable_referent) {
2018 ASSERT(IsOriginalObject(object));
2019
2020 // movable_referent: some references to VM heap objects may be patched with
2021 // references to isolate-local objects (e.g., optimized static calls).
2022 // We need to track such references since the latter may move during
2023 // compaction.
2024 if (target::CanEmbedAsRawPointerInGeneratedCode(object) &&
2025 !movable_referent) {
2026 movl(dst, Immediate(target::ToRawPointer(object)));
2027 } else {
2028 DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
2029 ASSERT(IsInOldSpace(object));
2030 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2031 EmitUint8(0xB8 + dst);
2032 buffer_.EmitObject(object);
2033 }
2034}
2035
// Loads an object into `dst` without leaving an attacker-controllable Smi
// immediate in the instruction stream: unsafe Smis are XOR-obfuscated with
// the per-assembler JIT cookie and decoded with a second XOR at run time.
2036void Assembler::LoadObjectSafely(Register dst, const Object& object) {
2037 ASSERT(IsOriginalObject(object));
2038 if (target::IsSmi(object) && !IsSafeSmi(object)) {
2039 const int32_t cookie = jit_cookie();
2040 movl(dst, Immediate(target::ToRawSmi(object) ^ cookie));
2041 xorl(dst, Immediate(cookie));  // Undo the obfuscation: dst = raw Smi.
2042 } else {
2043 LoadObject(dst, object);
2044 }
2045}
2046
// Pushes an object reference onto the stack. Raw-pointer-embeddable objects
// are pushed as a plain immediate; otherwise `push imm32` (0x68) is emitted
// with the object recorded in the buffer's object pool for relocation.
2047void Assembler::PushObject(const Object& object) {
2048 ASSERT(IsOriginalObject(object));
2049 if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
2050 pushl(Immediate(target::ToRawPointer(object)));
2051 } else {
2052 DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
2053 ASSERT(IsInOldSpace(object));
2054 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2055 EmitUint8(0x68);
2056 buffer_.EmitObject(object);
2057 }
2058}
2059
// Compares `reg` against an object reference (sets flags). Raw-pointer
// objects use a normal `cmpl`; pool-tracked objects emit either the EAX
// short form (0x3D = 0x05 + /7<<3, `cmp eax, imm32`) or the general
// 0x81 /7 form, with the object recorded in the buffer's object pool.
2060void Assembler::CompareObject(Register reg, const Object& object) {
2061 ASSERT(IsOriginalObject(object));
2062 if (target::CanEmbedAsRawPointerInGeneratedCode(object)) {
2063 cmpl(reg, Immediate(target::ToRawPointer(object)));
2064 } else {
2065 DEBUG_ASSERT(IsNotTemporaryScopedHandle(object));
2066 ASSERT(IsInOldSpace(object));
2067 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2068 if (reg == EAX) {
2069 EmitUint8(0x05 + (7 << 3));  // 0x3D: cmp eax, imm32 short form.
2070 buffer_.EmitObject(object);
2071 } else {
2072 EmitUint8(0x81);
2073 EmitOperand(7, Operand(reg));  // /7 selects CMP in the 0x81 group.
2074 buffer_.EmitObject(object);
2075 }
2076 }
2077}
2078
2081 CanBeSmi can_be_smi,
2082 Register scratch) {
2083 // x.slot = x. Barrier should have be removed at the IL level.
2084 ASSERT(object != value);
2085
2086 bool spill_scratch = false;
2087 if (scratch == kNoRegister) {
2088 spill_scratch = true;
2089 if (object != EAX && value != EAX) {
2090 scratch = EAX;
2091 } else if (object != EBX && value != EBX) {
2092 scratch = EBX;
2093 } else {
2094 scratch = ECX;
2095 }
2096 }
2097 ASSERT(scratch != object);
2098 ASSERT(scratch != value);
2099
2100 // In parallel, test whether
2101 // - object is old and not remembered and value is new, or
2102 // - object is old and value is old and not marked and concurrent marking is
2103 // in progress
2104 // If so, call the WriteBarrier stub, which will either add object to the
2105 // store buffer (case 1) or add value to the marking stack (case 2).
2106 // Compare UntaggedObject::StorePointer.
2107 Label done;
2108 if (can_be_smi == kValueCanBeSmi) {
2110 } else {
2111#if defined(DEBUG)
2112 Label passed_check;
2113 BranchIfNotSmi(value, &passed_check, kNearJump);
2114 Breakpoint();
2115 Bind(&passed_check);
2116#endif
2117 }
2118 if (spill_scratch) {
2119 pushl(scratch);
2120 }
2121 movl(scratch, FieldAddress(object, target::Object::tags_offset()));
2123 andl(scratch, Address(THR, target::Thread::write_barrier_mask_offset()));
2124 testl(FieldAddress(value, target::Object::tags_offset()), scratch);
2125 if (spill_scratch) {
2126 popl(scratch);
2127 }
2128 j(ZERO, &done, kNearJump);
2129
2130 Register object_for_call = object;
2132 // Unlikely. Only non-graph intrinsics.
2133 // TODO(rmacnak): Shuffle registers in intrinsics.
2135 if (object == kWriteBarrierValueReg) {
2138 object_for_call = (value == EAX) ? ECX : EAX;
2139 pushl(object_for_call);
2140 movl(object_for_call, object);
2141 }
2143 }
2145 object_for_call)));
2147 if (object == kWriteBarrierValueReg) {
2148 popl(object_for_call);
2149 }
2151 }
2152 Bind(&done);
2153}
2154
2156 Register slot,
2158 CanBeSmi can_be_smi,
2159 Register scratch) {
2160 ASSERT(object != value);
2161 ASSERT(scratch != object);
2162 ASSERT(scratch != value);
2163 ASSERT(scratch != slot);
2164
2165 // In parallel, test whether
2166 // - object is old and not remembered and value is new, or
2167 // - object is old and value is old and not marked and concurrent marking is
2168 // in progress
2169 // If so, call the WriteBarrier stub, which will either add object to the
2170 // store buffer (case 1) or add value to the marking stack (case 2).
2171 // Compare UntaggedObject::StorePointer.
2172 Label done;
2173 if (can_be_smi == kValueCanBeSmi) {
2175 } else {
2176#if defined(DEBUG)
2177 Label passed_check;
2178 BranchIfNotSmi(value, &passed_check, kNearJump);
2179 Breakpoint();
2180 Bind(&passed_check);
2181#endif
2182 }
2183 movl(scratch, FieldAddress(object, target::Object::tags_offset()));
2185 andl(scratch, Address(THR, target::Thread::write_barrier_mask_offset()));
2186 testl(FieldAddress(value, target::Object::tags_offset()), scratch);
2187 j(ZERO, &done, kNearJump);
2188
2189 if ((object != kWriteBarrierObjectReg) || (value != kWriteBarrierValueReg) ||
2190 (slot != kWriteBarrierSlotReg)) {
2191 // Spill and shuffle unimplemented. Currently StoreIntoArray is only used
2192 // from StoreIndexInstr, which gets these exact registers from the register
2193 // allocator.
2194 UNIMPLEMENTED();
2195 }
2197 Bind(&done);
2198}
2199
2201 Register value) {
2202 // We can't assert the incremental barrier is not needed here, only the
2203 // generational barrier. We sometimes omit the write barrier when 'value' is
2204 // a constant, but we don't eagerly mark 'value' and instead assume it is also
2205 // reachable via a constant pool, so it doesn't matter if it is not traced via
2206 // 'object'.
2207 Label done;
2209 testb(FieldAddress(value, target::Object::tags_offset()),
2212 testb(FieldAddress(object, target::Object::tags_offset()),
2215 Stop("Write barrier is required");
2216 Bind(&done);
2217}
2218
2220 const Address& dest,
2221 const Object& value,
2222 MemoryOrder memory_order,
2223 OperandSize size) {
2226 // Ignoring memory_order.
2227 // On intel stores have store-release behavior (i.e. stores are not
2228 // re-ordered with other stores).
2229 // We don't run TSAN on 32 bit systems.
2230 // Don't call StoreRelease here because we would have to load the immediate
2231 // into a temp register which causes spilling.
2232 if (FLAG_target_thread_sanitizer) {
2233 if (memory_order == kRelease) {
2234 UNIMPLEMENTED();
2235 }
2236 }
2237 if (target::CanEmbedAsRawPointerInGeneratedCode(value)) {
2238 Immediate imm_value(target::ToRawPointer(value));
2239 movl(dest, imm_value);
2240 } else {
2241 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2242 EmitUint8(0xC7);
2243 EmitOperand(0, dest);
2244 buffer_.EmitObject(value);
2245 }
2246 // No store buffer update.
2247}
2248
2250 const Address& dest,
2251 Register value) {
2252 movl(dest, value);
2253}
2254
// Stores `value` into `dest` without a write barrier; in DEBUG builds first
// verifies the value really is a Smi (barrier-free stores are only safe for
// Smis, which the GC never scans as pointers).
2255void Assembler::StoreIntoSmiField(const Address& dest, Register value) {
2256#if defined(DEBUG)
2257 Label done;
2258 testl(value, Immediate(kHeapObjectTag));
2259 j(ZERO, &done);  // Smi tag bit clear => it is a Smi.
2260 Stop("New value must be Smi.");
2261 Bind(&done);
2262#endif // defined(DEBUG)
2263 movl(dest, value);
2264}
2265
// Initializes a Smi field to zero (raw Smi encoding of 0); no barrier needed.
2266void Assembler::ZeroInitSmiField(const Address& dest) {
2267 Immediate zero(target::ToRawSmi(0));
2268 movl(dest, zero);
2269}
2270
// Adds `increment` (as a raw Smi) to a Smi field in place; no barrier needed.
2271void Assembler::IncrementSmiField(const Address& dest, int32_t increment) {
2272 // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
2273 // the length of this instruction sequence.
2274 Immediate inc_imm(target::ToRawSmi(increment));
2275 addl(dest, inc_imm);
2276}
2277
2279 int32_t constant = bit_cast<int32_t, float>(value);
2280 pushl(Immediate(constant));
2281 movss(dst, Address(ESP, 0));
2282 addl(ESP, Immediate(target::kWordSize));
2283}
2284
2286 // TODO(5410843): Need to have a code constants table.
2287 int64_t constant = bit_cast<int64_t, double>(value);
2288 pushl(Immediate(Utils::High32Bits(constant)));
2289 pushl(Immediate(Utils::Low32Bits(constant)));
2290 movsd(dst, Address(ESP, 0));
2291 addl(ESP, Immediate(2 * target::kWordSize));
2292}
2293
// Materializes a 128-bit SIMD constant in `dst` by pushing its four 32-bit
// words (highest first, so word 0 lands at the lowest address), loading with
// an unaligned movups, and then restoring ESP.
2294void Assembler::LoadQImmediate(XmmRegister dst, simd128_value_t value) {
2295 // TODO(5410843): Need to have a code constants table.
2296 pushl(Immediate(value.int_storage[3]));
2297 pushl(Immediate(value.int_storage[2]));
2298 pushl(Immediate(value.int_storage[1]));
2299 pushl(Immediate(value.int_storage[0]));
2300 movups(dst, Address(ESP, 0));
2301 addl(ESP, Immediate(4 * target::kWordSize));
2302}
2303
2305 static const struct ALIGN16 {
2306 uint32_t a;
2307 uint32_t b;
2308 uint32_t c;
2309 uint32_t d;
2310 } float_negate_constant = {0x80000000, 0x00000000, 0x80000000, 0x00000000};
2311 xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
2312}
2313
2315 static const struct ALIGN16 {
2316 uint64_t a;
2317 uint64_t b;
2318 } double_negate_constant = {0x8000000000000000LLU, 0x8000000000000000LLU};
2319 xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
2320}
2321
2323 static const struct ALIGN16 {
2324 uint64_t a;
2325 uint64_t b;
2326 } double_abs_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
2327 andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
2328}
2329
2330void Assembler::EnterFrame(intptr_t frame_size) {
2331 if (prologue_offset_ == -1) {
2332 Comment("PrologueOffset = %" Pd "", CodeSize());
2334 }
2335#ifdef DEBUG
2336 intptr_t check_offset = CodeSize();
2337#endif
2338 pushl(EBP);
2339 movl(EBP, ESP);
2340#ifdef DEBUG
2341 ProloguePattern pp(CodeAddress(check_offset));
2342 ASSERT(pp.IsValid());
2343#endif
2344 if (frame_size != 0) {
2345 Immediate frame_space(frame_size);
2346 subl(ESP, frame_space);
2347 }
2348}
2349
// Tears down the frame built by EnterFrame: restores ESP from EBP and pops
// the saved EBP.
2350void Assembler::LeaveFrame() {
2351 movl(ESP, EBP);
2352 popl(EBP);
2353}
2354
// Reserves `frame_space` bytes of stack and rounds ESP down to the platform's
// activation-frame alignment, as required before calling into C++.
2355void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
2356 // Reserve space for arguments and align frame before entering
2357 // the C++ world.
2358 AddImmediate(ESP, Immediate(-frame_space));
2359 if (OS::ActivationFrameAlignment() > 1) {
2360 andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1)));  // Round down.
2361 }
2362}
2363
2365#if defined(DEBUG)
2366 Label ok;
2367 leal(EAX, Address(EBP, target::frame_layout.exit_link_slot_from_entry_fp *
2369 cmpl(EAX, ESP);
2370 j(EQUAL, &ok);
2371 Stop("target::frame_layout.exit_link_slot_from_entry_fp mismatch");
2372 Bind(&ok);
2373#endif
2374}
2375
2376// EBX receiver, ECX ICData entries array
2377// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
2380 intptr_t start = CodeSize();
2381 Label have_cid, miss;
2382 Bind(&miss);
2384
2385 Comment("MonomorphicCheckedEntry");
2386 ASSERT(CodeSize() - start ==
2388
2389 const intptr_t cid_offset = target::Array::element_offset(0);
2390 const intptr_t count_offset = target::Array::element_offset(1);
2391
2392 movl(EAX, Immediate(kSmiCid << 1));
2393 testl(EBX, Immediate(kSmiTagMask));
2394 j(ZERO, &have_cid, kNearJump);
2396 SmiTag(EAX);
2397 Bind(&have_cid);
2398 // EAX: cid as Smi
2399
2400 cmpl(EAX, FieldAddress(ECX, cid_offset));
2402 addl(FieldAddress(ECX, count_offset), Immediate(target::ToRawSmi(1)));
2403 xorl(EDX, EDX); // GC-safe for OptimizeInvokedFunction.
2404 nop(1);
2405
2406 // Fall through to unchecked entry.
2407 ASSERT(CodeSize() - start ==
2409}
2410
2411// EBX receiver, ECX guarded cid as Smi.
2412// Preserve EDX (ARGS_DESC_REG), not required today, but maybe later.
2414 UNIMPLEMENTED();
2415}
2416
2420 int3();
2421 }
2422 jmp(label);
2424 int3();
2425 }
2426}
2427
2429 // hash += other_hash
2430 addl(dst, other);
2431 // hash += hash << 10
2432 movl(other, dst);
2433 shll(other, Immediate(10));
2434 addl(dst, other);
2435 // hash ^= hash >> 6
2436 movl(other, dst);
2437 shrl(other, Immediate(6));
2438 xorl(dst, other);
2439}
2440
2441void Assembler::FinalizeHashForSize(intptr_t bit_size,
2442 Register dst,
2443 Register scratch) {
2444 ASSERT(bit_size > 0); // Can't avoid returning 0 if there are no hash bits!
2445 // While any 32-bit hash value fits in X bits, where X > 32, the caller may
2446 // reasonably expect that the returned values fill the entire bit space.
2447 ASSERT(bit_size <= kBitsPerInt32);
2448 ASSERT(scratch != kNoRegister);
2449 // hash += hash << 3;
2450 movl(scratch, dst);
2451 shll(scratch, Immediate(3));
2452 addl(dst, scratch);
2453 // hash ^= hash >> 11; // Logical shift, unsigned hash.
2454 movl(scratch, dst);
2455 shrl(scratch, Immediate(11));
2456 xorl(dst, scratch);
2457 // hash += hash << 15;
2458 movl(scratch, dst);
2459 shll(scratch, Immediate(15));
2460 addl(dst, scratch);
2461 // Size to fit.
2462 if (bit_size < kBitsPerInt32) {
2463 andl(dst, Immediate(Utils::NBitMask(bit_size)));
2464 }
2465 // return (hash == 0) ? 1 : hash;
2466 Label done;
2468 incl(dst);
2469 Bind(&done);
2470}
2471
2473 // We generate the same number of instructions whether or not the slow-path is
2474 // forced. This simplifies GenerateJitCallbackTrampolines.
2475
2476 // Compare and swap the value at Thread::safepoint_state from unacquired
2477 // to acquired. On success, jump to 'success'; otherwise, fallthrough.
2478 Label done, slow_path;
2479 if (FLAG_use_slow_path) {
2480 jmp(&slow_path);
2481 }
2482
2483 pushl(EAX);
2487 movl(scratch, EAX);
2488 popl(EAX);
2489 cmpl(scratch, Immediate(target::Thread::full_safepoint_state_unacquired()));
2490
2491 if (!FLAG_use_slow_path) {
2492 j(EQUAL, &done);
2493 }
2494
2495 Bind(&slow_path);
2497 movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
2498 call(scratch);
2499
2500 Bind(&done);
2501}
2502
2503void Assembler::TransitionGeneratedToNative(Register destination_address,
2504 Register new_exit_frame,
2505 Register new_exit_through_ffi,
2506 bool enter_safepoint) {
2507 // Save exit frame information to enable stack walking.
2509 new_exit_frame);
2510
2511 movl(compiler::Address(THR,
2513 new_exit_through_ffi);
2514 Register scratch = new_exit_through_ffi;
2515
2516 // Mark that the thread is executing native code.
2517 movl(VMTagAddress(), destination_address);
2520
2521 if (enter_safepoint) {
2522 EnterFullSafepoint(scratch);
2523 }
2524}
2525
2527 bool ignore_unwind_in_progress) {
2528 ASSERT(scratch != EAX);
2529 // We generate the same number of instructions whether or not the slow-path is
2530 // forced, for consistency with EnterFullSafepoint.
2531
2532 // Compare and swap the value at Thread::safepoint_state from acquired
2533 // to unacquired. On success, jump to 'success'; otherwise, fallthrough.
2534 Label done, slow_path;
2535 if (FLAG_use_slow_path) {
2536 jmp(&slow_path);
2537 }
2538
2539 pushl(EAX);
2543 movl(scratch, EAX);
2544 popl(EAX);
2545 cmpl(scratch, Immediate(target::Thread::full_safepoint_state_acquired()));
2546
2547 if (!FLAG_use_slow_path) {
2548 j(EQUAL, &done);
2549 }
2550
2551 Bind(&slow_path);
2552 if (ignore_unwind_in_progress) {
2553 movl(scratch,
2554 Address(THR,
2555 target::Thread::
2556 exit_safepoint_ignore_unwind_in_progress_stub_offset()));
2557 } else {
2559 }
2560 movl(scratch, FieldAddress(scratch, target::Code::entry_point_offset()));
2561 call(scratch);
2562
2563 Bind(&done);
2564}
2565
2567 bool exit_safepoint,
2568 bool ignore_unwind_in_progress,
2569 bool set_tag) {
2570 if (exit_safepoint) {
2571 ExitFullSafepoint(scratch, ignore_unwind_in_progress);
2572 } else {
2573 // flag only makes sense if we are leaving safepoint
2574 ASSERT(!ignore_unwind_in_progress);
2575#if defined(DEBUG)
2576 // Ensure we've already left the safepoint.
2577 movl(scratch, Address(THR, target::Thread::safepoint_state_offset()));
2578 andl(scratch, Immediate(target::Thread::full_safepoint_state_acquired()));
2579 Label ok;
2580 j(ZERO, &ok);
2581 Breakpoint();
2582 Bind(&ok);
2583#endif
2584 }
2585
2586 // Mark that the thread is executing Dart code.
2587 if (set_tag) {
2589 Immediate(target::Thread::vm_tag_dart_id()));
2590 }
2593
2594 // Reset exit frame information in Isolate's mutator thread structure.
2596 Immediate(0));
2597 movl(compiler::Address(THR,
2599 compiler::Immediate(0));
2600}
2601
// Caller-saved (volatile) general-purpose registers that must be preserved
// around runtime calls when their values are live; used by LeafRuntimeScope.
2602 static constexpr intptr_t kNumberOfVolatileCpuRegisters = 3;
2603 static const Register volatile_cpu_registers[kNumberOfVolatileCpuRegisters] = {
2604     EAX, ECX, EDX};
2605
// Calls a non-leaf runtime entry: the entry's target is loaded into ECX and
// the argument count into EDX, the registers the runtime-call stub expects.
// NOTE(review): line 2613 (presumably the actual call through the
// call-to-runtime stub) is missing from this extraction.
2606 void Assembler::CallRuntime(const RuntimeEntry& entry,
2607                             intptr_t argument_count) {
2608   ASSERT(!entry.is_leaf());
2609   // Argument count is not checked here, but in the runtime entry for a more
2610   // informative error message.
2611   movl(ECX, compiler::Address(THR, entry.OffsetFromThread()));
2612   movl(EDX, compiler::Immediate(argument_count));
2614 }
2615
2616#define __ assembler_->
2617
// Sets up a frame for calling a leaf runtime function: enters a frame,
// optionally saves the volatile CPU registers and all XMM registers, then
// reserves |frame_size| bytes of properly aligned outgoing-argument space.
// NOTE(review): lines 2639 (presumably `offset += kFpuRegisterSize;`) and
// 2643 are missing from this extraction.
2618 LeafRuntimeScope::LeafRuntimeScope(Assembler* assembler,
2619                                    intptr_t frame_size,
2620                                    bool preserve_registers)
2621     : assembler_(assembler), preserve_registers_(preserve_registers) {
2622   __ Comment("EnterCallRuntimeFrame");
2623   __ EnterFrame(0);
2624
2625   if (preserve_registers_) {
2626     // Preserve volatile CPU registers.
2627     for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) {
2628       __ pushl(volatile_cpu_registers[i]);
2629     }
2630
2631     // Preserve all XMM registers.
2632     __ subl(ESP, Immediate(kNumberOfXmmRegisters * kFpuRegisterSize));
2633     // Store XMM registers with the lowest register number at the lowest
2634     // address.
2635     intptr_t offset = 0;
2636     for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
2637       XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
2638       __ movups(Address(ESP, offset), xmm_reg);
2640     }
2641   } else {
2642     // These registers must always be preserved.
2644   }
2645
2646   __ ReserveAlignedFrameSpace(frame_size);
2647 }
2648
// Invokes the leaf runtime function |entry|: loads the target address from
// the thread object, tags the thread with the entry for profiling/stack
// walking, makes the call, and restores the Dart VM tag afterwards.
2649 void LeafRuntimeScope::Call(const RuntimeEntry& entry,
2650                             intptr_t argument_count) {
2651   ASSERT(argument_count == entry.argument_count());
2652   __ movl(EAX, compiler::Address(THR, entry.OffsetFromThread()));
2653   __ movl(compiler::Assembler::VMTagAddress(), EAX);
2654   __ call(EAX);
2655   __ movl(compiler::Assembler::VMTagAddress(),
2656           compiler::Immediate(VMTag::kDartTagId));
2657 }
2658
// Tears down the leaf-runtime-call frame set up by the constructor,
// restoring (in reverse order) the XMM and volatile CPU registers if they
// were preserved, then leaving the frame.
// NOTE(review): lines 2666 (remainder of kPushedRegistersSize, presumably
// `kNumberOfXmmRegisters * kFpuRegisterSize;`) and 2675 (presumably
// `offset += kFpuRegisterSize;`) are missing from this extraction.
2659 LeafRuntimeScope::~LeafRuntimeScope() {
2660   if (preserve_registers_) {
2661     // ESP might have been modified to reserve space for arguments
2662     // and ensure proper alignment of the stack frame.
2663     // We need to restore it before restoring registers.
2664     const intptr_t kPushedRegistersSize =
2665         kNumberOfVolatileCpuRegisters * target::kWordSize +
2667     __ leal(ESP, Address(EBP, -kPushedRegistersSize));
2668
2669     // Restore all XMM registers.
2670     // XMM registers have the lowest register number at the lowest address.
2671     intptr_t offset = 0;
2672     for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
2673       XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
2674       __ movups(xmm_reg, Address(ESP, offset));
2676     }
2677     __ addl(ESP, Immediate(offset));
2678
2679     // Restore volatile CPU registers.
2680     for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) {
2681       __ popl(volatile_cpu_registers[i]);
2682     }
2683   }
2684
2685   __ leave();
2686 }
2687
// Loads |target|'s Code object into CODE_REG and calls through its entry
// point for |entry_kind|. |movable_target| selects how LoadObject embeds
// the code object.
2688 void Assembler::Call(const Code& target,
2689                      bool movable_target,
2690                      CodeEntryKind entry_kind) {
2691   LoadObject(CODE_REG, ToObject(target), movable_target);
2692   call(FieldAddress(CODE_REG, target::Code::entry_point_offset(entry_kind)));
2693 }
2694
// Calls a VM stub whose Code object can be embedded as a raw pointer in
// generated code (it is never moved by the GC).
// NOTE(review): lines 2698 and 2701 are missing from this extraction, so
// the address computation and the call itself are truncated here.
2695 void Assembler::CallVmStub(const Code& target) {
2696   const Object& target_as_object = CastHandle<Object, Code>(target);
2697   ASSERT(target::CanEmbedAsRawPointerInGeneratedCode(target_as_object));
2699       target::ToRawPointer(target_as_object) +
2700       target::Code::entry_point_offset(CodeEntryKind::kNormal) -
2702 }
2703
// Unconditional jump to |target|'s entry point via an external label.
2704 void Assembler::Jmp(const Code& target) {
2705   const ExternalLabel label(target::Code::EntryPointOf(target));
2706   jmp(&label);
2707 }
2708
// Conditional jump (taken when |condition| holds) to |target|'s entry point.
2709 void Assembler::J(Condition condition, const Code& target) {
2710   const ExternalLabel label(target::Code::EntryPointOf(target));
2711   j(condition, &label);
2712 }
2713
// Pads the instruction stream with nops so that (buffer position + |offset|)
// becomes a multiple of |alignment| (which must be a power of two).
// NOTE(review): line 2724 is missing from this extraction — as listed, the
// while-loop never updates bytes_needed (presumably
// `bytes_needed -= MAX_NOP_SIZE;`); consult the original file.
2714 void Assembler::Align(intptr_t alignment, intptr_t offset) {
2715   ASSERT(Utils::IsPowerOfTwo(alignment));
2716   intptr_t pos = offset + buffer_.GetPosition();
2717   intptr_t mod = pos & (alignment - 1);
2718   if (mod == 0) {
2719     return;
2720   }
2721   intptr_t bytes_needed = alignment - mod;
// Emit maximal multi-byte nops first, then one final shorter nop.
2722   while (bytes_needed > MAX_NOP_SIZE) {
2723     nop(MAX_NOP_SIZE);
2725   }
2726   if (bytes_needed != 0) {
2727     nop(bytes_needed);
2728   }
2729   ASSERT(((offset + buffer_.GetPosition()) & (alignment - 1)) == 0);
2730 }
2731
2732void Assembler::Bind(Label* label) {
2733 intptr_t bound = buffer_.Size();
2734 ASSERT(!label->IsBound()); // Labels can only be bound once.
2735 while (label->IsLinked()) {
2736 intptr_t position = label->LinkPosition();
2737 intptr_t next = buffer_.Load<int32_t>(position);
2738 buffer_.Store<int32_t>(position, bound - (position + 4));
2739 label->position_ = next;
2740 }
2741 while (label->HasNear()) {
2742 intptr_t position = label->NearPosition();
2743 intptr_t offset = bound - (position + 1);
2744 ASSERT(Utils::IsInt(8, offset));
2745 buffer_.Store<int8_t>(position, offset);
2746 }
2747 label->BindTo(bound);
2748}
2749
// Copies one word from memory |src| to memory |dst| through the scratch
// register |tmp| (x86 has no direct memory-to-memory move).
2750 void Assembler::MoveMemoryToMemory(Address dst, Address src, Register tmp) {
2751   movl(tmp, src);
2752   movl(dst, tmp);
2753 }
2754
2755#ifndef PRODUCT
2756void Assembler::MaybeTraceAllocation(intptr_t cid,
2757 Label* trace,
2758 Register temp_reg,
2759 JumpDistance distance) {
2760 ASSERT(cid > 0);
2761 Address state_address(kNoRegister, 0);
2762
2763 ASSERT(temp_reg != kNoRegister);
2764 LoadIsolateGroup(temp_reg);
2765 movl(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
2766 movl(temp_reg,
2767 Address(temp_reg,
2768 target::ClassTable::allocation_tracing_state_table_offset()));
2769 cmpb(Address(temp_reg,
2770 target::ClassTable::AllocationTracingStateSlotOffsetFor(cid)),
2771 Immediate(0));
2772 // We are tracing for this class, jump to the trace label which will use
2773 // the allocation stub.
2774 j(NOT_ZERO, trace, distance);
2775}
2776#endif // !PRODUCT
2777
// Attempts an inline new-space allocation of a fixed-size object of class
// |cid|: bumps Thread::top by |instance_size|, leaves the tagged pointer in
// |instance_reg| with its tag word initialized, and jumps to |failure| (the
// allocation stub path) when inline allocation is disabled, traced, or the
// new-space is exhausted.
// NOTE(review): lines 2787 (alignment operand of IsAligned) and 2789
// (remainder of the if-condition) are missing from this extraction.
2778 void Assembler::TryAllocateObject(intptr_t cid,
2779                                   intptr_t instance_size,
2780                                   Label* failure,
2781                                   JumpDistance distance,
2782                                   Register instance_reg,
2783                                   Register temp_reg) {
2784   ASSERT(failure != nullptr);
2785   ASSERT(instance_size != 0);
2786   ASSERT(Utils::IsAligned(instance_size,
2788   if (FLAG_inline_alloc &&
2790     // If this allocation is traced, program will jump to failure path
2791     // (i.e. the allocation stub) which will allocate the object and trace the
2792     // allocation call site.
2793     NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, temp_reg, distance));
2794     movl(instance_reg, Address(THR, target::Thread::top_offset()));
2795     addl(instance_reg, Immediate(instance_size));
2796     // instance_reg: potential next object start.
2797     cmpl(instance_reg, Address(THR, target::Thread::end_offset()));
2798     j(ABOVE_EQUAL, failure, distance);
2799     CheckAllocationCanary(instance_reg);
2800     // Successfully allocated the object, now update top to point to
2801     // next object start and store the class in the class field of object.
2802     movl(Address(THR, target::Thread::top_offset()), instance_reg);
2803     ASSERT(instance_size >= kHeapObjectTag);
// Rewind to the object start and apply the heap-object tag in one step.
2804     subl(instance_reg, Immediate(instance_size - kHeapObjectTag));
2805     const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
2806     movl(FieldAddress(instance_reg, target::Object::tags_offset()),
2807          Immediate(tags));
2808   } else {
2809     jmp(failure);
2810   }
2811 }
2812
// Attempts an inline new-space allocation of a variable-length object of
// |instance_size| bytes: on success `instance` holds the tagged object with
// its tag word initialized and |end_address| the untagged end of the
// allocation; jumps to |failure| otherwise.
// NOTE(review): line 2817 (presumably the `Register instance,` parameter)
// and line 2823 (remainder of the if-condition) are missing from this
// extraction.
2813 void Assembler::TryAllocateArray(intptr_t cid,
2814                                  intptr_t instance_size,
2815                                  Label* failure,
2816                                  JumpDistance distance,
2818                                  Register end_address,
2819                                  Register temp_reg) {
2820   ASSERT(failure != nullptr);
2821   ASSERT(temp_reg != kNoRegister);
2822   if (FLAG_inline_alloc &&
2824     // If this allocation is traced, program will jump to failure path
2825     // (i.e. the allocation stub) which will allocate the object and trace the
2826     // allocation call site.
2827     NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, temp_reg, distance));
2828     movl(instance, Address(THR, target::Thread::top_offset()));
2829     movl(end_address, instance);
2830
2831     addl(end_address, Immediate(instance_size));
// An unsigned overflow of top + size means the allocation cannot fit.
2832     j(CARRY, failure);
2833
2834     // Check if the allocation fits into the remaining space.
2835     // EAX: potential new object start.
2836     // EBX: potential next object start.
2837     cmpl(end_address, Address(THR, target::Thread::end_offset()));
2838     j(ABOVE_EQUAL, failure);
2839     CheckAllocationCanary(instance);
2840
2841     // Successfully allocated the object(s), now update top to point to
2842     // next object start and initialize the object.
2843     movl(Address(THR, target::Thread::top_offset()), end_address);
2844     addl(instance, Immediate(kHeapObjectTag));
2845
2846     // Initialize the tags.
2847     const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
2848     movl(FieldAddress(instance, target::Object::tags_offset()),
2849          Immediate(tags));
2850   } else {
2851     jmp(failure);
2852   }
2853 }
2854
2855void Assembler::CopyMemoryWords(Register src,
2856 Register dst,
2857 Register size,
2858 Register temp) {
2859 // This loop is equivalent to
2860 // shrl(size, Immediate(target::kWordSizeLog2));
2861 // rep_movsd();
2862 // but shows better performance on certain micro-benchmarks.
2863 Label loop, done;
2864 cmpl(size, Immediate(0));
2865 j(EQUAL, &done, kNearJump);
2866 Bind(&loop);
2867 movl(temp, Address(src, 0));
2868 addl(src, Immediate(target::kWordSize));
2869 movl(Address(dst, 0), temp);
2870 addl(dst, Immediate(target::kWordSize));
2871 subl(size, Immediate(target::kWordSize));
2872 j(NOT_ZERO, &loop, kNearJump);
2873 Bind(&done);
2874}
2875
// Pushes the current code object onto the stack using `push imm32`
// (opcode 0x68); the 32-bit immediate is emitted via EmitObject so it is
// recorded as an embedded object pointer.
2876 void Assembler::PushCodeObject() {
2877   DEBUG_ASSERT(IsNotTemporaryScopedHandle(code_));
2878   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2879   EmitUint8(0x68);
2880   buffer_.EmitObject(code_);
2881 }
2882
// Sets up a Dart frame: enters a frame, pushes the code object (so the
// frame can be identified during stack walks), and reserves |frame_size|
// bytes for locals/spills.
2883 void Assembler::EnterDartFrame(intptr_t frame_size) {
2884   EnterFrame(0);
2885
2886   PushCodeObject();
2887
2888   if (frame_size != 0) {
2889     subl(ESP, Immediate(frame_size));
2890   }
2891 }
2892
// Tears down a frame set up by EnterDartFrame.
2893 void Assembler::LeaveDartFrame() {
2894   LeaveFrame();
2895 }
2896
2897 // On entry to a function compiled for OSR, the caller's frame pointer, the
2898 // stack locals, and any copied parameters are already in place. The frame
2899 // pointer is already set up. There may be extra space for spill slots to
2900 // allocate.
2901 void Assembler::EnterOsrFrame(intptr_t extra_size) {
2902   Comment("EnterOsrFrame");
// Record the prologue offset on first use so the code's metadata points at
// this position.
2903   if (prologue_offset_ == -1) {
2904     Comment("PrologueOffset = %" Pd "", CodeSize());
2905     prologue_offset_ = CodeSize();
2906   }
2907
2908   if (extra_size != 0) {
2909     subl(ESP, Immediate(extra_size));
2910   }
2911 }
2912
// A stub frame is a Dart frame with no local slots reserved up front.
2913 void Assembler::EnterStubFrame() {
2914   EnterDartFrame(0);
2915 }
2916
// Tears down a frame set up by EnterStubFrame.
2917 void Assembler::LeaveStubFrame() {
2918   LeaveDartFrame();
2919 }
2920
// Sets up a frame for calling into C code and reserves |frame_space| bytes
// of ABI-aligned outgoing-argument space.
// NOTE(review): line 2923 is missing from this extraction (it follows the
// "Already saved." comment — likely an assertion or register list).
2921 void Assembler::EnterCFrame(intptr_t frame_space) {
2922   // Already saved.
2924
2925   EnterFrame(0);
2926   ReserveAlignedFrameSpace(frame_space);
2927 }
2928
// Tears down a frame set up by EnterCFrame.
2929 void Assembler::LeaveCFrame() {
2930   LeaveFrame();
2931 }
2932
2933void Assembler::EmitOperand(int rm, const Operand& operand) {
2934 ASSERT(rm >= 0 && rm < 8);
2935 const intptr_t length = operand.length_;
2936 ASSERT(length > 0);
2937 // Emit the ModRM byte updated with the given RM value.
2938 ASSERT((operand.encoding_[0] & 0x38) == 0);
2939 EmitUint8(operand.encoding_[0] + (rm << 3));
2940 // Emit the rest of the encoded operand.
2941 for (intptr_t i = 1; i < length; i++) {
2942 EmitUint8(operand.encoding_[i]);
2943 }
2944}
2945
// Emits |imm| as a 32-bit little-endian immediate.
2946 void Assembler::EmitImmediate(const Immediate& imm) {
2947   EmitInt32(imm.value());
2948 }
2949
// Emits an ALU instruction (group selected by |rm|: add/or/and/sub/etc.)
// with an immediate operand, choosing the shortest encoding:
//   0x83 /rm ib  - sign-extended 8-bit immediate,
//   0x05+rm<<3   - short form with EAX as destination and imm32,
//   0x81 /rm id  - general form with imm32.
2950 void Assembler::EmitComplex(int rm,
2951                             const Operand& operand,
2952                             const Immediate& immediate) {
2953   ASSERT(rm >= 0 && rm < 8);
2954   if (immediate.is_int8()) {
2955     // Use sign-extended 8-bit immediate.
2956     EmitUint8(0x83);
2957     EmitOperand(rm, operand);
2958     EmitUint8(immediate.value() & 0xFF);
2959   } else if (operand.IsRegister(EAX)) {
2960     // Use short form if the destination is eax.
2961     EmitUint8(0x05 + (rm << 3));
2962     EmitImmediate(immediate);
2963   } else {
2964     EmitUint8(0x81);
2965     EmitOperand(rm, operand);
2966     EmitImmediate(immediate);
2967   }
2968 }
2969
2970void Assembler::EmitLabel(Label* label, intptr_t instruction_size) {
2971 if (label->IsBound()) {
2972 intptr_t offset = label->Position() - buffer_.Size();
2973 ASSERT(offset <= 0);
2974 EmitInt32(offset - instruction_size);
2975 } else {
2976 EmitLabelLink(label);
2977 }
2978}
2979
// Links this 32-bit displacement site into |label|'s fixup chain: the
// emitted int32 temporarily stores the previous head of the chain, and the
// label now points at this site. Bind() later rewrites the chain with real
// pc-relative offsets.
2980 void Assembler::EmitLabelLink(Label* label) {
2981   ASSERT(!label->IsBound());
2982   intptr_t position = buffer_.Size();
2983   EmitInt32(label->position_);
2984   label->LinkTo(position);
2985 }
2986
// Links this 8-bit (near) displacement site to |label|: a zero placeholder
// byte is emitted and its position recorded so Bind() can patch it.
2987 void Assembler::EmitNearLabelLink(Label* label) {
2988   ASSERT(!label->IsBound());
2989   intptr_t position = buffer_.Size();
2990   EmitUint8(0);
2991   label->NearLinkTo(position);
2992 }
2993
2994void Assembler::EmitGenericShift(int rm, Register reg, const Immediate& imm) {
2995 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2996 ASSERT(imm.is_int8());
2997 if (imm.value() == 1) {
2998 EmitUint8(0xD1);
2999 EmitOperand(rm, Operand(reg));
3000 } else {
3001 EmitUint8(0xC1);
3002 EmitOperand(rm, Operand(reg));
3003 EmitUint8(imm.value() & 0xFF);
3004 }
3005}
3006
// Emits a shift/rotate of |operand| by the count in CL (opcode 0xD3);
// |rm| selects the operation within the shift group. x86 only allows ECX
// as a variable shift count register.
3007 void Assembler::EmitGenericShift(int rm,
3008                                  const Operand& operand,
3009                                  Register shifter) {
3010   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
3011   ASSERT(shifter == ECX);
3012   EmitUint8(0xD3);
3013   EmitOperand(rm, Operand(operand));
3014 }
3015
// Loads the class id of heap |object| into |result| by extracting the cid
// bit field from the object's tag word (the field occupies the top 20 bits,
// so a single shift suffices — the asserts pin that layout).
3016 void Assembler::LoadClassId(Register result, Register object) {
3017   ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3018   ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3019   movl(result, FieldAddress(object, target::Object::tags_offset()));
3020   shrl(result, Immediate(target::UntaggedObject::kClassIdTagPos));
3021 }
3022
// Loads the Class for |class_id| into |result| via the isolate group's
// cached class table (result = class_table[class_id]).
3023 void Assembler::LoadClassById(Register result, Register class_id) {
3024   ASSERT(result != class_id);
3025
3026   const intptr_t table_offset =
3027       target::IsolateGroup::cached_class_table_table_offset();
3028   LoadIsolateGroup(result);
3029   movl(result, Address(result, table_offset));
// Each table entry is one 32-bit pointer, hence the TIMES_4 scale.
3030   movl(result, Address(result, class_id, TIMES_4, 0));
3031 }
3032
// Loads |object|'s class id into |scratch| and compares it with |class_id|,
// setting the flags for a subsequent conditional branch.
3033 void Assembler::CompareClassId(Register object,
3034                                intptr_t class_id,
3035                                Register scratch) {
3036   LoadClassId(scratch, object);
3037   cmpl(scratch, Immediate(class_id));
3038 }
3039
// Speculatively untags |object|: if it was a Smi, jumps to |is_smi| with
// the untagged value in |object|; otherwise loads the (heap) object's class
// id into |scratch| and compares it with |class_id|, leaving the flags set
// for the caller. Note that on the non-Smi path |object| remains shifted
// right by one, which the tag-word load compensates for with TIMES_2.
3040 void Assembler::SmiUntagOrCheckClass(Register object,
3041                                      intptr_t class_id,
3042                                      Register scratch,
3043                                      Label* is_smi) {
3044   ASSERT(kSmiTagShift == 1);
3045   ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3046   ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3047   // Untag optimistically. Tag bit is shifted into the CARRY.
3048   SmiUntag(object);
3049   j(NOT_CARRY, is_smi, kNearJump);
3050   // Load cid: can't use LoadClassId, object is untagged. Use TIMES_2 scale
3051   // factor in the addressing mode to compensate for this.
3052   movl(scratch, Address(object, TIMES_2,
3053                         target::Object::tags_offset() + kHeapObjectTag));
3054   shrl(scratch, Immediate(target::UntaggedObject::kClassIdTagPos));
3055   cmpl(scratch, Immediate(class_id));
3056 }
3057
// Loads the class id of |object| into |result|, yielding kSmiCid when
// |object| is a Smi. When the registers differ, a branch-free sequence is
// used: a static dummy word laid out like a tag word with cid == kSmiCid
// stands in for the object on the Smi path (cmovne picks the real object
// on the non-Smi path).
3058 void Assembler::LoadClassIdMayBeSmi(Register result, Register object) {
3059   if (result == object) {
// Registers alias: fall back to an explicit branch.
3060     Label smi, join;
3061
3062     testl(object, Immediate(kSmiTagMask));
3063     j(EQUAL, &smi, Assembler::kNearJump);
3064     LoadClassId(result, object);
3065     jmp(&join, Assembler::kNearJump);
3066
3067     Bind(&smi);
3068     movl(result, Immediate(kSmiCid));
3069
3070     Bind(&join);
3071   } else {
3072     ASSERT(result != object);
3073     static const intptr_t kSmiCidSource =
3074         kSmiCid << target::UntaggedObject::kClassIdTagPos;
3075
3076     // Make a dummy "Object" whose cid is kSmiCid.
// "+ 1" applies the heap-object tag that LoadClassId's FieldAddress removes.
3077     movl(result, Immediate(reinterpret_cast<int32_t>(&kSmiCidSource) + 1));
3078
3079     // Check if object (in tmp) is a Smi.
3080     testl(object, Immediate(kSmiTagMask));
3081
3082     // If the object is not a Smi, use the original object to load the cid.
3083     // Otherwise, the dummy object is used, and the result is kSmiCid.
3084     cmovne(result, object);
3085     LoadClassId(result, result);
3086   }
3087 }
3088
// Like LoadClassIdMayBeSmi, but leaves the class id in |result| as a tagged
// Smi (i.e. shifted left by the Smi tag).
3089 void Assembler::LoadTaggedClassIdMayBeSmi(Register result, Register object) {
3090   if (result == object) {
// Registers alias: use an explicit branch and materialize the Smi constant
// directly on the Smi path.
3091     Label smi, join;
3092
3093     testl(object, Immediate(kSmiTagMask));
3094     j(EQUAL, &smi, Assembler::kNearJump);
3095     LoadClassId(result, object);
3096     SmiTag(result);
3097     jmp(&join, Assembler::kNearJump);
3098
3099     Bind(&smi);
3100     movl(result, Immediate(target::ToRawSmi(kSmiCid)));
3101
3102     Bind(&join);
3103   } else {
3104     LoadClassIdMayBeSmi(result, object);
3105     SmiTag(result);
3106   }
3107 }
3108
3109void Assembler::EnsureHasClassIdInDEBUG(intptr_t cid,
3110 Register src,
3111 Register scratch,
3112 bool can_be_null) {
3113#if defined(DEBUG)
3114 Comment("Check that object in register has cid %" Pd "", cid);
3115 Label matches;
3116 LoadClassIdMayBeSmi(scratch, src);
3117 CompareImmediate(scratch, cid);
3118 BranchIf(EQUAL, &matches, Assembler::kNearJump);
3119 if (can_be_null) {
3120 CompareImmediate(scratch, kNullCid);
3121 BranchIf(EQUAL, &matches, Assembler::kNearJump);
3122 }
3123 Breakpoint();
3124 Bind(&matches);
3125#endif
3126}
3127
3128bool Assembler::AddressCanHoldConstantIndex(const Object& constant,
3129 bool is_external,
3130 intptr_t cid,
3131 intptr_t index_scale) {
3132 if (!IsSafeSmi(constant)) return false;
3133 const int64_t index = target::SmiValue(constant);
3134 const int64_t offset =
3135 is_external ? 0 : (target::Instance::DataOffsetFor(cid) - kHeapObjectTag);
3136 const int64_t disp = index * index_scale + offset;
3137 return Utils::IsInt(32, disp);
3138}
3139
3140Address Assembler::ElementAddressForIntIndex(bool is_external,
3141 intptr_t cid,
3142 intptr_t index_scale,
3143 Register array,
3144 intptr_t index,
3145 intptr_t extra_disp) {
3146 if (is_external) {
3147 return Address(array, index * index_scale + extra_disp);
3148 } else {
3149 const int64_t disp = static_cast<int64_t>(index) * index_scale +
3150 target::Instance::DataOffsetFor(cid) + extra_disp;
3151 ASSERT(Utils::IsInt(32, disp));
3152 return FieldAddress(array, static_cast<int32_t>(disp));
3153 }
3154}
3155
3156Address Assembler::ElementAddressForRegIndex(bool is_external,
3157 intptr_t cid,
3158 intptr_t index_scale,
3159 bool index_unboxed,
3160 Register array,
3161 Register index,
3162 intptr_t extra_disp) {
3163 if (is_external) {
3164 return Address(array, index, ToScaleFactor(index_scale, index_unboxed),
3165 extra_disp);
3166 } else {
3167 return FieldAddress(array, index, ToScaleFactor(index_scale, index_unboxed),
3168 target::Instance::DataOffsetFor(cid) + extra_disp);
3169 }
3170}
3171
3172void Assembler::RangeCheck(Register value,
3173 Register temp,
3174 intptr_t low,
3175 intptr_t high,
3176 RangeCheckCondition condition,
3177 Label* target) {
3178 auto cc = condition == kIfInRange ? BELOW_EQUAL : ABOVE;
3179 Register to_check = value;
3180 if (temp != kNoRegister) {
3181 movl(temp, value);
3182 to_check = temp;
3183 }
3184 subl(to_check, Immediate(low));
3185 cmpl(to_check, Immediate(high - low));
3186 j(cc, target);
3187}
3188
3189} // namespace compiler
3190} // namespace dart
3191
3192#endif // defined(TARGET_ARCH_IA32)
Align
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
Definition: DM.cpp:263
int count
Definition: FontMgrTest.cpp:50
SkPoint pos
static float next(float f)
static const double J
static bool ok(int result)
#define EQUAL(field)
bool equals(SkDrawable *a, SkDrawable *b)
#define __
#define UNREACHABLE()
Definition: assert.h:248
#define DEBUG_ASSERT(cond)
Definition: assert.h:321
#define ASSERT_EQUAL(expected, actual)
Definition: assert.h:309
#define COMPILE_ASSERT(expr)
Definition: assert.h:339
GLenum type
static intptr_t ActivationFrameAlignment()
static bool sse4_1_supported()
Definition: cpu_ia32.h:61
static bool popcnt_supported()
Definition: cpu_ia32.h:62
static bool abm_supported()
Definition: cpu_ia32.h:63
static bool IsInt(intptr_t N, T value)
Definition: utils.h:313
static int32_t Low32Bits(int64_t value)
Definition: utils.h:369
static constexpr T NBitMask(size_t n)
Definition: utils.h:548
static int32_t High32Bits(int64_t value)
Definition: utils.h:373
static Address Absolute(const uword addr)
void Stop(const char *message)
uword CodeAddress(intptr_t offset)
void Comment(const char *format,...) PRINTF_ATTRIBUTE(2
void PushRegistersInOrder(std::initializer_list< Register > regs)
void maxps(XmmRegister dst, XmmRegister src)
void jmp(Register reg)
void unpcklpd(XmmRegister dst, XmmRegister src)
void FloatNegate(XmmRegister f)
void cvtdq2pd(XmmRegister dst, XmmRegister src)
void minpd(XmmRegister dst, XmmRegister src)
void fildl(const Address &src)
void notl(Register reg)
void CombineHashes(Register dst, Register other) override
void cmovns(Register dst, Register src)
void BranchIfSmi(Register reg, Label *label, JumpDistance distance=kFarJump) override
void movups(XmmRegister dst, const Address &src)
void idivl(Register reg)
void Load(Register reg, const Address &address, OperandSize type, Condition cond)
void unpckhpd(XmmRegister dst, XmmRegister src)
void movq(const Address &dst, XmmRegister src)
void shldl(Register dst, Register src, Register shifter)
void divl(Register reg)
void LoadIsolate(Register rd)
void mulss(XmmRegister dst, XmmRegister src)
void IncrementSmiField(const Address &dest, int32_t increment)
void pxor(XmmRegister dst, XmmRegister src)
void movlhps(XmmRegister dst, XmmRegister src)
void ZeroInitSmiField(const Address &dest)
void pcmpeqq(XmmRegister dst, XmmRegister src)
void lzcntl(Register dst, Register src)
void decl(Register reg)
void cvtss2si(Register dst, XmmRegister src)
void movd(XmmRegister dst, Register src)
void negateps(XmmRegister dst)
void movl(Register dst, const Immediate &src)
void cvtsi2ss(XmmRegister dst, Register src)
void filds(const Address &src)
void j(Condition condition, Label *label, JumpDistance distance=kFarJump)
void maxpd(XmmRegister dst, XmmRegister src)
void addpd(XmmRegister dst, XmmRegister src)
void LoadFromStack(Register dst, intptr_t depth)
void movmskpd(Register dst, XmmRegister src)
void leal(Register dst, const Address &src)
void shufps(XmmRegister dst, XmmRegister src, const Immediate &mask)
void zerowps(XmmRegister dst)
void shll(Register reg, const Immediate &imm)
void shufpd(XmmRegister dst, XmmRegister src, const Immediate &mask)
void pushl(Register reg)
void negl(Register reg)
void PushRegister(Register r)
void b(Label *label, Condition cond=AL)
void cvtpd2ps(XmmRegister dst, XmmRegister src)
void set1ps(XmmRegister dst, Register tmp, const Immediate &imm)
void orps(XmmRegister dst, XmmRegister src)
void mulps(XmmRegister dst, XmmRegister src)
void divsd(XmmRegister dst, XmmRegister src)
void roundsd(XmmRegister dst, XmmRegister src, RoundingMode mode)
void cmove(Register dst, Register src)
void pextrd(Register dst, XmmRegister src, const Immediate &imm)
void cvtps2pd(XmmRegister dst, XmmRegister src)
void LoadDImmediate(DRegister dd, double value, Register scratch, Condition cond=AL)
void subpl(XmmRegister dst, XmmRegister src)
void mull(Register reg)
void cmppsneq(XmmRegister dst, XmmRegister src)
void mulsd(XmmRegister dst, XmmRegister src)
void divpd(XmmRegister dst, XmmRegister src)
void movmskps(Register dst, XmmRegister src)
void sqrtss(XmmRegister dst, XmmRegister src)
void rsqrtps(XmmRegister dst)
void cmppseq(XmmRegister dst, XmmRegister src)
void StoreObjectIntoObjectNoBarrier(Register object, const Address &dest, const Object &value, MemoryOrder memory_order=kRelaxedNonAtomic, OperandSize size=kWordBytes) override
void cvtss2sd(XmmRegister dst, XmmRegister src)
void movzxw(Register dst, Register src)
void cmovs(Register dst, Register src)
void cmpw(Register rn, Operand o)
void xorpd(XmmRegister dst, const Address &src)
void movsd(XmmRegister dst, const Address &src)
void fistpl(const Address &dst)
void cmppsnle(XmmRegister dst, XmmRegister src)
void flds(const Address &src)
void BranchIf(Condition condition, Label *label, JumpDistance distance=kFarJump)
void CompareObject(Register rn, const Object &object)
void bt(Register base, Register offset)
void comisd(XmmRegister a, XmmRegister b)
void fstps(const Address &dst)
void PushObject(const Object &object)
void pmovsxdq(XmmRegister dst, XmmRegister src)
void AndRegisters(Register dst, Register src1, Register src2=kNoRegister) override
void sqrtpd(XmmRegister dst)
void testb(const Address &address, const Immediate &imm)
void sarl(Register reg, const Immediate &imm)
void LoadQImmediate(QRegister dd, simd128_value_t value)
void divss(XmmRegister dst, XmmRegister src)
void testl(Register reg1, Register reg2)
void cvttsd2si(Register dst, XmmRegister src)
void incl(Register reg)
void StoreToStack(Register src, intptr_t depth)
void subpd(XmmRegister dst, XmmRegister src)
void StoreInternalPointer(Register object, const Address &dest, Register value)
void cmppslt(XmmRegister dst, XmmRegister src)
void sqrtsd(XmmRegister dst, XmmRegister src)
void andps(XmmRegister dst, XmmRegister src)
void Bind(Label *label) override
void ReserveAlignedFrameSpace(intptr_t frame_space)
void cmppsle(XmmRegister dst, XmmRegister src)
void cmovno(Register dst, Register src)
void andpd(XmmRegister dst, const Address &src)
void cmovne(Register dst, Register src)
void LoadClassId(Register result, Register object, Condition cond=AL)
void enter(const Immediate &imm)
void setcc(Condition condition, ByteRegister dst)
void cvtsi2sd(XmmRegister dst, Register src)
void PopRegister(Register r)
void ArrayStoreBarrier(Register object, Register slot, Register value, CanBeSmi can_be_smi, Register scratch) override
void xchgl(Register dst, Register src)
void subss(XmmRegister dst, XmmRegister src)
void xorps(XmmRegister dst, const Address &src)
void ExitFullSafepoint(Register scratch0, Register scratch1, bool ignore_unwind_in_progress)
void subps(XmmRegister dst, XmmRegister src)
void popcntl(Register dst, Register src)
void addpl(XmmRegister dst, XmmRegister src)
void CallRuntime(const RuntimeEntry &entry, intptr_t argument_count)
void LockCmpxchgl(const Address &address, Register reg)
void LoadObject(Register rd, const Object &object, Condition cond=AL)
void CompareRegisters(Register rn, Register rm)
void fnstcw(const Address &dst)
void negatepd(XmmRegister dst)
void CompareToStack(Register src, intptr_t depth)
void addsd(XmmRegister dst, XmmRegister src)
void fldcw(const Address &src)
static Address VMTagAddress()
void cvtsd2ss(XmmRegister dst, XmmRegister src)
void cvtsd2si(Register dst, XmmRegister src)
void bsfl(Register dst, Register src)
void notps(XmmRegister dst)
void absps(XmmRegister dst)
void subsd(XmmRegister dst, XmmRegister src)
static bool IsSafeSmi(const Object &object)
void movsxb(Register dst, ByteRegister src)
void TransitionGeneratedToNative(Register destination_address, Register exit_frame_fp, Register exit_through_ffi, Register scratch0, bool enter_safepoint)
void SubImmediate(Register rd, Register rn, int32_t value, Condition cond=AL)
void Drop(intptr_t stack_elements)
void imull(Register dst, Register src)
void DoubleAbs(XmmRegister reg)
void reciprocalps(XmmRegister dst)
void minps(XmmRegister dst, XmmRegister src)
void StoreBarrier(Register object, Register value, CanBeSmi can_be_smi, Register scratch) override
void SmiTag(Register reg, Condition cond)
void VerifyStoreNeedsNoWriteBarrier(Register object, Register value) override
void CompareWords(Register reg1, Register reg2, intptr_t offset, Register count, Register temp, Label *equals) override
void addss(XmmRegister dst, XmmRegister src)
void ExtendValue(Register rd, Register rm, OperandSize sz, Condition cond)
void mulpd(XmmRegister dst, XmmRegister src)
void pmovmskb(Register dst, XmmRegister src)
void movhlps(XmmRegister dst, XmmRegister src)
void movb(Register dst, const Address &src)
void BranchIfNotSmi(Register reg, Label *label, JumpDistance distance=kFarJump)
void divps(XmmRegister dst, XmmRegister src)
void popl(Register reg)
void abspd(XmmRegister dst)
void StoreIntoSmiField(const Address &dest, Register value)
void addps(XmmRegister dst, XmmRegister src)
void TransitionNativeToGenerated(Register scratch0, Register scratch1, bool exit_safepoint, bool ignore_unwind_in_progress=false, bool set_tag=true)
void fistps(const Address &dst)
void LoadSImmediate(SRegister sd, float value, Condition cond=AL)
void DoubleNegate(XmmRegister d)
void shrl(Register reg, const Immediate &imm)
void cvttss2si(Register dst, XmmRegister src)
void LoadIsolateGroup(Register dst)
void cmovgel(Register dst, Register src)
void BranchOnMonomorphicCheckedEntryJIT(Label *label)
void Store(Register reg, const Address &address, OperandSize type, Condition cond)
void movzxb(Register dst, ByteRegister src)
void movss(XmmRegister dst, const Address &src)
void cmpxchgl(const Address &address, Register reg)
void MoveRegister(Register rd, Register rm, Condition cond)
void Breakpoint() override
static constexpr intptr_t kCallExternalLabelSize
void EnterFrame(RegList regs, intptr_t frame_space)
void sqrtps(XmmRegister dst)
void fldl(const Address &src)
void unpckhps(XmmRegister dst, XmmRegister src)
void cmovlessl(Register dst, Register src)
void FinalizeHashForSize(intptr_t bit_size, Register dst, Register scratch=TMP) override
void orpd(XmmRegister dst, XmmRegister src)
void cmpb(const Address &address, const Immediate &imm)
void call(Register reg)
void LoadObjectSafely(Register dst, const Object &object)
void shrdl(Register dst, Register src, Register shifter)
void AddImmediate(Register rd, int32_t value, Condition cond=AL)
void cmppsnlt(XmmRegister dst, XmmRegister src)
void movsxw(Register dst, Register src)
void fstpl(const Address &dst)
void bsrl(Register dst, Register src)
void unpcklps(XmmRegister dst, XmmRegister src)
void comiss(XmmRegister a, XmmRegister b)
void ffree(intptr_t value)
void ArithmeticShiftRightImmediate(Register reg, intptr_t shift) override
void movaps(XmmRegister dst, XmmRegister src)
LeafRuntimeScope(Assembler *assembler, intptr_t frame_size, bool preserve_registers)
static word element_offset(intptr_t index)
static word entry_point_offset(CodeEntryKind kind=CodeEntryKind::kNormal)
static const word kMonomorphicEntryOffsetJIT
Definition: runtime_api.h:1379
static const word kPolymorphicEntryOffsetJIT
Definition: runtime_api.h:1380
static uword full_safepoint_state_acquired()
Definition: runtime_api.cc:899
static word array_write_barrier_entry_point_offset()
static word exit_through_ffi_offset()
static uword native_execution_state()
Definition: runtime_api.cc:907
static word call_to_runtime_entry_point_offset()
static word top_exit_frame_info_offset()
static word write_barrier_wrappers_thread_offset(Register regno)
static uword generated_execution_state()
Definition: runtime_api.cc:903
static word safepoint_state_offset()
static word switchable_call_miss_entry_offset()
static uword full_safepoint_state_unacquired()
Definition: runtime_api.cc:895
static word exit_safepoint_stub_offset()
static word enter_safepoint_stub_offset()
static word write_barrier_mask_offset()
static word execution_state_offset()
static const word kNewOrEvacuationCandidateBit
Definition: runtime_api.h:421
#define UNIMPLEMENTED
@ kNormal
Default priority level.
Definition: embedder.h:262
#define ASSERT(E)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition: main.cc:19
VkInstance instance
Definition: main.cc:48
static bool b
struct MyStruct a[10]
#define FATAL(error)
uint8_t value
GAsyncResult * result
uint32_t * target
int argument_count
Definition: fuchsia.cc:52
size_t length
ClipOpAndAA opAA SkRegion region
Definition: SkRecords.h:238
uword MakeTagWordForNewSpaceObject(classid_t cid, uword instance_size)
Definition: runtime_api.cc:360
word ToRawSmi(const dart::Object &a)
Definition: runtime_api.cc:960
static constexpr intptr_t kWordSize
Definition: runtime_api.h:274
bool IsSmi(int64_t v)
Definition: runtime_api.cc:31
word SmiValue(const dart::Object &a)
Definition: runtime_api.cc:969
FrameLayout frame_layout
Definition: stack_frame.cc:76
bool IsOriginalObject(const Object &object)
Definition: runtime_api.cc:226
InvalidClass kObjectAlignment
int32_t CreateJitCookie()
Definition: runtime_api.cc:247
bool IsInOldSpace(const Object &obj)
Definition: runtime_api.cc:101
const Object & ToObject(const Code &handle)
Definition: runtime_api.h:173
Definition: dart_vm.cc:33
const Register kWriteBarrierSlotReg
const Register THR
static const struct dart::ALIGN16 float_negate_constant
static const struct dart::ALIGN16 float_not_constant
const Register kWriteBarrierObjectReg
constexpr int32_t kMinInt32
Definition: globals.h:482
static const struct dart::ALIGN16 float_absolute_constant
const Register kWriteBarrierValueReg
static constexpr bool IsCalleeSavedRegister(Register reg)
Definition: constants.h:85
static const struct dart::ALIGN16 double_negate_constant
static const struct dart::ALIGN16 float_zerow_constant
@ kNullCid
Definition: class_id.h:252
uintptr_t uword
Definition: globals.h:501
const Register CODE_REG
@ NOT_CARRY
Definition: constants_x86.h:35
@ NOT_ZERO
@ BELOW_EQUAL
Definition: constants_x86.h:19
@ NOT_EQUAL
@ ABOVE_EQUAL
Definition: constants_x86.h:16
@ kNoRegister
Definition: constants_arm.h:99
bool IsAllocatableInNewSpace(intptr_t size)
Definition: spaces.h:57
constexpr intptr_t kBitsPerInt32
Definition: globals.h:466
const intptr_t cid
const int MAX_NOP_SIZE
constexpr intptr_t kWordSize
Definition: globals.h:509
@ kHeapObjectTag
@ kSmiTagMask
@ kSmiTagShift
static ScaleFactor ToScaleFactor(intptr_t index_scale, bool index_unboxed)
Definition: constants.h:95
COMPILE_ASSERT(kUnreachableReference==WeakTable::kNoValue)
static const struct dart::ALIGN16 double_abs_constant
const int kFpuRegisterSize
ByteRegister ByteRegisterOf(Register reg)
@ kNumberOfXmmRegisters
def call(args)
Definition: dom.py:159
constexpr SkISize kSize
Definition: mock_canvas.cc:17
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
Definition: switches.h:228
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition: switches.h:259
static size_t bytes_needed(int vertex_count, Flags flags, int index_count)
Definition: dl_vertices.cc:23
def matches(file)
Definition: gen_manifest.py:38
dst
Definition: cp.py:12
static bool Bind(PassBindingsCacheMTL &pass, ShaderStage stage, size_t bind_index, const BufferView &view)
constexpr T Absolute(const T &val)
Definition: scalar.h:21
dest
Definition: zip.py:79
#define Pd
Definition: globals.h:408
static SkString join(const CommandLineFlags::StringArray &)
Definition: skpbench.cpp:741
SeparatedVector2 offset
#define NOT_IN_PRODUCT(code)
Definition: globals.h:84
#define ALIGN16
Definition: globals.h:172