blob: c0ca7ef437adc0a0e44fdeaee2d70089cb6f19fe [file] [log] [blame]
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_x86_64.h"
18
19#include "base/casts.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "memory_region.h"
22#include "thread.h"
23
24namespace art {
25namespace x86_64 {
26
// Debug-printing support for the register wrapper types used by the assembler.
std::ostream& operator<<(std::ostream& os, const CpuRegister& reg) {
  return os << reg.AsRegister();
}

std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
  return os << reg.AsFloatRegister();
}

// X87 stack registers print as "ST<n>".
std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
  return os << "ST" << static_cast<int>(reg);
}
38
// Indirect call through a register: FF /2. REX prefix only when the register
// needs an extension bit.
void X86_64Assembler::call(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg.LowBits());
}


// Indirect call through memory: FF /2 with a memory operand.
void X86_64Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}


// Direct relative call: E8 rel32. The displacement is relative to the end of
// the 5-byte instruction (1 opcode byte + 4 displacement bytes).
void X86_64Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  static const int kSize = 5;
  // Offset by one because we already have emitted the opcode.
  EmitLabel(label, kSize - 1);
}
62
// Push a register: 50+rd. Push defaults to 64-bit operand size in long mode,
// so no REX.W is needed; only the register-extension bit may require a REX.
void X86_64Assembler::pushq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x50 + reg.LowBits());
}


// Push from memory: FF /6.
void X86_64Assembler::pushq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}


// Push an immediate: 6A ib for 8-bit values, otherwise 68 id (sign-extended).
void X86_64Assembler::pushq(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // pushq only supports 32b immediate.
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}
89
90
// Pop into a register: 58+rd. Like push, pop is 64-bit by default in long
// mode, so only the register-extension REX bit is ever emitted.
void X86_64Assembler::popq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x58 + reg.LowBits());
}


// Pop into memory: 8F /0.
void X86_64Assembler::popq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}
104
105
// Load an immediate into a 64-bit register. Uses the short REX.W C7 /0 id
// form (imm32, sign-extended) when the value fits; otherwise the full
// 10-byte REX.W B8+rd io "movabs" with a 64-bit immediate.
void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int32()) {
    // 32 bit. Note: sign-extends.
    EmitRex64(dst);
    EmitUint8(0xC7);
    EmitRegisterOperand(0, dst.LowBits());
    EmitInt32(static_cast<int32_t>(imm.value()));
  } else {
    EmitRex64(dst);
    EmitUint8(0xB8 + dst.LowBits());
    EmitInt64(imm.value());
  }
}


// Load a 32-bit immediate into a register: B8+rd id.
void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {
  CHECK(imm.is_int32());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xB8 + dst.LowBits());
  EmitImmediate(imm);
}


// Store an immediate to a 64-bit memory slot: REX.W C7 /0 id. The immediate
// is limited to 32 bits (sign-extended) by the encoding.
void X86_64Assembler::movq(const Address& dst, const Immediate& imm) {
  CHECK(imm.is_int32());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
139
140
// Register-to-register 64-bit move: REX.W 89 /r (MR encoding).
void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x89 is movq r/m64 <- r64, with op1 in r/m and op2 in reg: so reverse EmitRex64
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}


// Register-to-register 32-bit move: 8B /r (RM encoding).
void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// 64-bit load: REX.W 8B /r.
void X86_64Assembler::movq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


// 32-bit load: 8B /r.
void X86_64Assembler::movl(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


// 64-bit store: REX.W 89 /r.
void X86_64Assembler::movq(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}


// 32-bit store: 89 /r.
void X86_64Assembler::movl(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}

// Store a 32-bit immediate to memory: C7 /0 id.
void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
196
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800197
// Conditional move, 64-bit by default.
void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src) {
  cmov(c, dst, src, true);
}

// Conditional move: 0F 40+cc /r, with REX.W when is64bit is set. The
// condition code is folded directly into the second opcode byte.
void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x40 + c);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
209
210
// Zero-extending byte-to-dword move from a register: 0F B6 /r. The source is
// a byte register, so its REX encoding must be normalized (spl/bpl/sil/dil
// need a REX prefix to be addressable).
void X86_64Assembler::movzxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// Zero-extending byte load: 0F B6 /r with a memory source.
void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // Byte register is only in the source register form, so we don't use
  // EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst.LowBits(), src);
}


// Sign-extending byte-to-dword move from a register: 0F BE /r.
void X86_64Assembler::movsxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// Sign-extending byte load: 0F BE /r with a memory source.
void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // Byte register is only in the source register form, so we don't use
  // EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst.LowBits(), src);
}


// Plain byte loads are deliberately unsupported: callers must pick an
// explicit extension (movzxb/movsxb) so the upper bits are well-defined.
void X86_64Assembler::movb(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}
254
255
// Byte store from a register: 88 /r. The source is a byte register, so its
// REX encoding is normalized.
void X86_64Assembler::movb(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(src, dst);
  EmitUint8(0x88);
  EmitOperand(src.LowBits(), dst);
}


// Byte store of an immediate: C6 /0 ib.
void X86_64Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xC6);
  // Register::RAX (encoding 0) supplies the /0 opcode extension in ModRM.
  EmitOperand(Register::RAX, dst);
  CHECK(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
272
273
// Zero-extending word-to-dword move from a register: 0F B7 /r.
void X86_64Assembler::movzxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// Zero-extending word load: 0F B7 /r with a memory source.
void X86_64Assembler::movzxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst.LowBits(), src);
}


// Sign-extending word-to-dword move from a register: 0F BF /r.
void X86_64Assembler::movsxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// Sign-extending word load: 0F BF /r with a memory source.
void X86_64Assembler::movsxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst.LowBits(), src);
}


// Plain word loads are deliberately unsupported: callers must pick an
// explicit extension (movzxw/movsxw) so the upper bits are well-defined.
void X86_64Assembler::movw(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}
313
314
// Word store from a register: 66 (operand-size override) + 89 /r.
void X86_64Assembler::movw(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}


// Word store of an immediate: 66 + C7 /0 iw. The 16-bit immediate is emitted
// manually, low byte first.
void X86_64Assembler::movw(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitOptionalRex32(dst);
  EmitUint8(0xC7);
  // Register::RAX (encoding 0) supplies the /0 opcode extension in ModRM.
  EmitOperand(Register::RAX, dst);
  CHECK(imm.is_uint16() || imm.is_int16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8(imm.value() >> 8);
}
334
335
// Load effective address, 64-bit result: REX.W 8D /r.
void X86_64Assembler::leaq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8D);
  EmitOperand(dst.LowBits(), src);
}


// Load effective address, 32-bit result: 8D /r.
void X86_64Assembler::leal(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8D);
  EmitOperand(dst.LowBits(), src);
}
350
351
// Aligned packed-single register move: 0F 28 /r.
void X86_64Assembler::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// Scalar-single load: F3 0F 10 /r.
void X86_64Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


// Scalar-single store: F3 0F 11 /r.
void X86_64Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}


// Scalar-single register move: F3 0F 11 /r, with src in the reg field.
void X86_64Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);  // Movss is MR encoding instead of the usual RM.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}
389
390
// Sign-extend a 32-bit register into a 64-bit register: REX.W 63 /r.
void X86_64Assembler::movsxd(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x63);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// Sign-extend a 32-bit memory operand into a 64-bit register: REX.W 63 /r.
void X86_64Assembler::movsxd(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x63);
  EmitOperand(dst.LowBits(), src);
}
405
406
// GPR -> XMM transfer; defaults to the 64-bit (movq) form.
void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
  movd(dst, src, true);
}

// XMM -> GPR transfer; defaults to the 64-bit (movq) form.
void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
  movd(dst, src, true);
}

// GPR -> XMM: 66 (REX.W) 0F 6E /r. With REX.W this is the movq form moving
// all 64 bits; without it only the low 32 bits are transferred.
void X86_64Assembler::movd(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst.LowBits(), Operand(src));
}

// XMM -> GPR: 66 (REX.W) 0F 7E /r. Note the reversed operand roles: the XMM
// register goes in the ModRM reg field.
void X86_64Assembler::movd(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex(false, is64bit, src.NeedsRex(), false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src.LowBits(), Operand(dst));
}
432
433
// Scalar-single arithmetic, register form: F3 0F <op> /r where <op> is
// 58 (add), 5C (sub), 59 (mul), 5E (div).
void X86_64Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// addss with a memory source.
void X86_64Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// subss with a memory source.
void X86_64Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// mulss with a memory source.
void X86_64Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// divss with a memory source.
void X86_64Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}
512
513
// x87: load single-precision float onto the FPU stack: D9 /0.
void X86_64Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}


// x87: store single-precision float from ST(0), without popping: D9 /2.
void X86_64Assembler::fsts(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(2, dst);
}


// x87: store single-precision float from ST(0) and pop: D9 /3.
void X86_64Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
533
534
// Scalar-double load: F2 0F 10 /r.
void X86_64Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


// Scalar-double store: F2 0F 11 /r.
void X86_64Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}


// Scalar-double register move: F2 0F 11 /r, with src in the reg field.
void X86_64Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);  // Movsd is MR encoding instead of the usual RM.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}
563
564
// Scalar-double arithmetic, register form: F2 0F <op> /r where <op> is
// 58 (add), 5C (sub), 59 (mul), 5E (div).
void X86_64Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// addsd with a memory source.
void X86_64Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// subsd with a memory source.
void X86_64Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// mulsd with a memory source.
void X86_64Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// divsd with a memory source.
void X86_64Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}
643
644
// CVTSI2SS — convert signed integer in a GP register to scalar single float.
// Two-arg form defaults to a 32-bit integer source.
void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src) {
  cvtsi2ss(dst, src, false);
}


// CVTSI2SS r/xmm: F3 [REX.W] 0F 2A /r. REX.W selects a 64-bit integer source.
void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}


// CVTSI2SS with a memory source; is64bit selects a 64-bit integer load.
void X86_64Assembler::cvtsi2ss(XmmRegister dst, const Address& src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), src);
}
678
679
// CVTSI2SD — convert signed integer in a GP register to scalar double float.
// Two-arg form defaults to a 32-bit integer source.
void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src) {
  cvtsi2sd(dst, src, false);
}


// CVTSI2SD r/xmm: F2 [REX.W] 0F 2A /r. REX.W selects a 64-bit integer source.
void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}


// CVTSI2SD with a memory source; is64bit selects a 64-bit integer load.
void X86_64Assembler::cvtsi2sd(XmmRegister dst, const Address& src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), src);
}
713
714
// CVTSS2SI — convert scalar single to signed 32-bit integer (rounded, not
// truncated): F3 [REX] 0F 2D /r. For truncation see cvttss2si.
void X86_64Assembler::cvtss2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
723
724
// CVTSS2SD — widen scalar single to scalar double: F3 [REX] 0F 5A /r.
void X86_64Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// CVTSS2SD with a memory source operand.
void X86_64Assembler::cvtss2sd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitOperand(dst.LowBits(), src);
}
743
744
// CVTSD2SI — convert scalar double to signed 32-bit integer (rounded, not
// truncated): F2 [REX] 0F 2D /r. For truncation see cvttsd2si.
void X86_64Assembler::cvtsd2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
753
754
// CVTTSS2SI — convert-with-truncation scalar single to signed integer.
// Two-arg form defaults to a 32-bit integer destination.
void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src) {
  cvttss2si(dst, src, false);
}


// CVTTSS2SI: F3 [REX.W] 0F 2C /r. REX.W selects a 64-bit integer destination.
void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
773
774
// CVTTSD2SI — convert-with-truncation scalar double to signed integer.
// Two-arg form defaults to a 32-bit integer destination.
void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src) {
  cvttsd2si(dst, src, false);
}


// CVTTSD2SI: F2 [REX.W] 0F 2C /r. REX.W selects a 64-bit integer destination.
void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
793
794
// CVTSD2SS — narrow scalar double to scalar single: F2 [REX] 0F 5A /r.
void X86_64Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// CVTSD2SS with a memory source operand.
void X86_64Assembler::cvtsd2ss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitOperand(dst.LowBits(), src);
}
813
814
// CVTDQ2PD — convert packed signed 32-bit integers to packed doubles:
// F3 [REX] 0F E6 /r.
void X86_64Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
823
824
// COMISS — ordered compare of scalar singles, sets EFLAGS: [REX] 0F 2F /r.
// No mandatory prefix (that is what distinguishes it from comisd's 66 prefix).
void X86_64Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


// COMISS with a memory second operand.
void X86_64Assembler::comiss(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitOperand(a.LowBits(), b);
}
841
842
// COMISD — ordered compare of scalar doubles, sets EFLAGS: 66 [REX] 0F 2F /r.
void X86_64Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


// COMISD with a memory second operand.
void X86_64Assembler::comisd(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitOperand(a.LowBits(), b);
}
861
862
// UCOMISS — unordered compare of scalar singles, sets EFLAGS: [REX] 0F 2E /r.
void X86_64Assembler::ucomiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


// UCOMISS with a memory second operand.
void X86_64Assembler::ucomiss(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a.LowBits(), b);
}
879
880
// UCOMISD — unordered compare of scalar doubles, sets EFLAGS: 66 [REX] 0F 2E /r.
void X86_64Assembler::ucomisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


// UCOMISD with a memory second operand.
void X86_64Assembler::ucomisd(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a.LowBits(), b);
}
899
900
// ROUNDSD (SSE4.1) — round scalar double with rounding mode in imm8:
// 66 [REX] 0F 3A 0B /r ib. imm bits select the rounding mode / MXCSR override.
void X86_64Assembler::roundsd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst.LowBits(), src);
  EmitUint8(imm.value());  // Single imm8; only the low byte of the Immediate is used.
}


// ROUNDSS (SSE4.1) — round scalar single: 66 [REX] 0F 3A 0A /r ib.
void X86_64Assembler::roundss(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
  EmitUint8(imm.value());  // Single imm8 rounding-control byte.
}
923
924
// SQRTSD — scalar double square root: F2 [REX] 0F 51 /r.
void X86_64Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// SQRTSS — scalar single square root: F3 [REX] 0F 51 /r.
void X86_64Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
943
944
// XORPD — bitwise XOR of packed doubles (memory source): 66 [REX] 0F 57 /r.
// Commonly used to flip/clear sign bits of double values.
void X86_64Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}


// XORPD, register-register form.
void X86_64Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// XORPS — bitwise XOR of packed singles (memory source): [REX] 0F 57 /r
// (no 66 prefix, unlike xorpd).
void X86_64Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}


// XORPS, register-register form.
void X86_64Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
981
982
// ANDPD — bitwise AND of packed doubles (memory source): 66 [REX] 0F 54 /r.
// Commonly used to mask off the sign bit (abs) of double values.
void X86_64Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst.LowBits(), src);
}

// ANDPD, register-register form.
void X86_64Assembler::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}

// ANDPS — bitwise AND of packed singles: [REX] 0F 54 /r (no 66 prefix).
void X86_64Assembler::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}

// ORPD — bitwise OR of packed doubles: 66 [REX] 0F 56 /r.
void X86_64Assembler::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}

// ORPS — bitwise OR of packed singles: [REX] 0F 56 /r (no 66 prefix).
void X86_64Assembler::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001025
// FLD m64fp — push a 64-bit float from memory onto the x87 stack: DD /0.
void X86_64Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}


// FST m64fp — store ST(0) to memory without popping: DD /2.
void X86_64Assembler::fstl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(2, dst);
}


// FSTP m64fp — store ST(0) to memory and pop the x87 stack: DD /3.
void X86_64Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}
1045
1046
// FSTSW AX — store the x87 status word into AX: 9B (FWAIT) DF E0.
// The 9B wait prefix makes this the checking (fstsw) rather than
// non-checking (fnstsw) form.
void X86_64Assembler::fstsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x9B);
  EmitUint8(0xDF);
  EmitUint8(0xE0);
}


// FNSTCW m16 — store the x87 control word without checking exceptions: D9 /7.
void X86_64Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}


// FLDCW m16 — load the x87 control word from memory: D9 /5.
void X86_64Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}
1067
1068
// FISTP m64int — store ST(0) as a 64-bit integer and pop: DF /7.
// NOTE(review): in this assembler's naming the 'l' suffix denotes the 64-bit
// integer form (AT&T syntax would call this fistpll) — confirm against callers.
void X86_64Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}


// FISTP m32int — store ST(0) as a 32-bit integer and pop: DB /3.
void X86_64Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}


// FILD m64int — push a 64-bit integer from memory onto the x87 stack: DF /5.
void X86_64Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}


// FILD m32int — push a 32-bit integer from memory onto the x87 stack: DB /0.
void X86_64Assembler::filds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}
1095
1096
// FINCSTP — increment the x87 stack-top pointer: D9 F7.
void X86_64Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}


// FFREE ST(i) — mark x87 register i as empty: DD C0+i.
void X86_64Assembler::ffree(const Immediate& index) {
  // NOTE(review): valid x87 stack indices are 0..7, so CHECK_LT(..., 7)
  // rejects the legal index 7 — presumably conservative/intentional here;
  // confirm before relaxing to CHECK_LT(..., 8).
  CHECK_LT(index.value(), 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + index.value());
}


// FSIN — sine of ST(0): D9 FE.
void X86_64Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}


// FCOS — cosine of ST(0): D9 FF.
void X86_64Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}


// FPTAN — partial tangent of ST(0), pushes 1.0: D9 F2.
void X86_64Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}

// FUCOMPP — unordered compare ST(0) with ST(1), pop both: DA E9.
void X86_64Assembler::fucompp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDA);
  EmitUint8(0xE9);
}


// FPREM — partial remainder of ST(0) / ST(1): D9 F8.
void X86_64Assembler::fprem() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF8);
}
1144
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001145
// XCHG r32, r32 — exchanges two 32-bit registers. Uses the one-byte
// 0x90+r short form when either operand is RAX, otherwise 87 /r.
void X86_64Assembler::xchgl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There is a short version for rax.
  // It's a bit awkward, as CpuRegister has a const field, so assignment and thus swapping doesn't
  // work.
  const bool src_rax = src.AsRegister() == RAX;
  const bool dst_rax = dst.AsRegister() == RAX;
  if (src_rax || dst_rax) {
    // NOTE(review): when both operands are RAX this emits plain 0x90, which in
    // 64-bit mode is NOP and does not zero the upper 32 bits the way an actual
    // 32-bit xchg (87 C0) would — confirm callers never rely on that.
    EmitOptionalRex32(src_rax ? dst : src);
    EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
    return;
  }

  // General case.
  EmitOptionalRex32(src, dst);
  EmitUint8(0x87);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1164
// XCHG r64, r64 — exchanges two 64-bit registers. Uses the REX.W 0x90+r
// short form when either operand is RAX, otherwise REX.W 87 /r.
void X86_64Assembler::xchgq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There is a short version for rax.
  // It's a bit awkward, as CpuRegister has a const field, so assignment and thus swapping doesn't
  // work.
  const bool src_rax = src.AsRegister() == RAX;
  const bool dst_rax = dst.AsRegister() == RAX;
  if (src_rax || dst_rax) {
    // If src == target, emit a nop instead.
    // (xchg rax, rax has no architectural effect, so the 1-byte NOP is used
    // rather than the REX.W-prefixed short form.)
    if (src_rax && dst_rax) {
      EmitUint8(0x90);
    } else {
      EmitRex64(src_rax ? dst : src);
      EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
    }
    return;
  }

  // General case.
  EmitRex64(src, dst);
  EmitUint8(0x87);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1189
1190
// XCHG r32, m32 — exchange a register with memory (implicitly locked by the
// CPU for the memory form): [REX] 87 /r.
void X86_64Assembler::xchgl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x87);
  EmitOperand(reg.LowBits(), address);
}
1197
1198
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001199void X86_64Assembler::cmpw(const Address& address, const Immediate& imm) {
1200 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1201 EmitOptionalRex32(address);
1202 EmitUint8(0x66);
1203 EmitComplex(7, address, imm);
1204}
1205
1206
// CMP r32, imm — uses opcode group 1 with ModRM /7 via EmitComplex.
void X86_64Assembler::cmpl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(7, Operand(reg), imm);
}


// CMP r32, r32: [REX] 3B /r (reg0 compared against reg1).
void X86_64Assembler::cmpl(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}


// CMP r32, m32: [REX] 3B /r.
void X86_64Assembler::cmpl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}


// CMP m32, r32: [REX] 39 /r (operands in the opposite direction of 3B).
void X86_64Assembler::cmpl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x39);
  EmitOperand(reg.LowBits(), address);
}


// CMP m32, imm — group 1 /7 via EmitComplex.
void X86_64Assembler::cmpl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(7, address, imm);
}
1243
1244
// CMP r64, r64: REX.W 3B /r.
void X86_64Assembler::cmpq(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}


// CMP r64, imm32 — the immediate is sign-extended to 64 bits; there is no
// imm64 form, hence the 32-bit check.
void X86_64Assembler::cmpq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // cmpq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(7, Operand(reg), imm);
}


// CMP r64, m64: REX.W 3B /r (REX computed from both reg and address).
void X86_64Assembler::cmpq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}


// CMP m64, imm32 (sign-extended) — group 1 /7 via EmitComplex.
void X86_64Assembler::cmpq(const Address& address, const Immediate& imm) {
  CHECK(imm.is_int32());  // cmpq only supports 32b immediate.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(address);
  EmitComplex(7, address, imm);
}
1275
1276
// ADD r32, r32: [REX] 03 /r.
void X86_64Assembler::addl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x03);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// ADD r32, m32: [REX] 03 /r.
void X86_64Assembler::addl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x03);
  EmitOperand(reg.LowBits(), address);
}
1291
1292
// TEST r32, r32: [REX] 85 /r — AND without storing, sets flags.
void X86_64Assembler::testl(CpuRegister reg1, CpuRegister reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg1, reg2);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());
}


// TEST r32, m32: [REX] 85 /r.
void X86_64Assembler::testl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x85);
  EmitOperand(reg.LowBits(), address);
}


// TEST r32, imm — picks the shortest encoding: byte-register form (A8 / F6)
// for small immediates, RAX short form (A9), or the general F7 /0 form.
void X86_64Assembler::testl(CpuRegister reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (RAX, RBX, RCX, and RDX)
  // we only test the byte CpuRegister to keep the encoding short.
  // (AsRegister() < 4 restricts this to the legacy low-byte registers,
  // which need no REX prefix.)
  if (immediate.is_uint8() && reg.AsRegister() < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg.AsRegister() == RAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg.AsRegister());
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg.AsRegister() == RAX) {
    // Use short form if the destination is RAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitOptionalRex32(reg);
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
1333
1334
// TEST r64, r64: REX.W 85 /r.
void X86_64Assembler::testq(CpuRegister reg1, CpuRegister reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg1, reg2);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());
}


// TEST r64, m64: REX.W 85 /r (REX computed from both reg and address).
void X86_64Assembler::testq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x85);
  EmitOperand(reg.LowBits(), address);
}
1349
1350
// AND r32, r32: [REX] 23 /r.
void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), Operand(src));
}


// AND r32, m32: [REX] 23 /r.
void X86_64Assembler::andl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x23);
  EmitOperand(reg.LowBits(), address);
}


// AND r32, imm — group 1 with ModRM /4 via EmitComplex.
void X86_64Assembler::andl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(4, Operand(dst), imm);
}


// AND r64, imm32 — the immediate is sign-extended; no imm64 form exists.
void X86_64Assembler::andq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // andq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(4, Operand(reg), imm);
}


// AND r64, r64: REX.W 23 /r.
void X86_64Assembler::andq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), Operand(src));
}


// AND r64, m64: REX.W 23 /r.
void X86_64Assembler::andq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), src);
}
1396
1397
// OR r32, r32: [REX] 0B /r.
void X86_64Assembler::orl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), Operand(src));
}


// OR r32, m32: [REX] 0B /r.
void X86_64Assembler::orl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0B);
  EmitOperand(reg.LowBits(), address);
}


// OR r32, imm — group 1 with ModRM /1 via EmitComplex.
void X86_64Assembler::orl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(1, Operand(dst), imm);
}


// OR r64, imm32 — the immediate is sign-extended; no imm64 form exists.
void X86_64Assembler::orq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // orq only supports 32b immediate.
  EmitRex64(dst);
  EmitComplex(1, Operand(dst), imm);
}


// OR r64, r64: REX.W 0B /r.
void X86_64Assembler::orq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), Operand(src));
}


// OR r64, m64: REX.W 0B /r.
void X86_64Assembler::orq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), src);
}
1443
1444
// Bitwise XOR of two 32-bit registers: xorl dst, src.
void X86_64Assembler::xorl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x33);  // XOR r32, r/m32.
  EmitOperand(dst.LowBits(), Operand(src));
}
1451
Andreas Gampe5a4fa822014-03-31 16:50:12 -07001452
// Bitwise XOR of a 32-bit memory operand into 'reg': xorl reg, [address].
void X86_64Assembler::xorl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x33);  // XOR r32, r/m32.
  EmitOperand(reg.LowBits(), address);
}
1459
1460
// Bitwise XOR of an immediate into a 32-bit register: xorl dst, imm.
void X86_64Assembler::xorl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(6, Operand(dst), imm);  // /6 is the XOR opcode extension.
}
1466
1467
// Bitwise XOR of two 64-bit registers: xorq dst, src.
void X86_64Assembler::xorq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x33);  // XOR r64, r/m64 (with REX.W).
  EmitOperand(dst.LowBits(), Operand(src));
}
1474
1475
// Bitwise XOR of a (sign-extended) 32-bit immediate into a 64-bit register.
void X86_64Assembler::xorq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // xorq only supports 32b immediate.
  EmitRex64(dst);
  EmitComplex(6, Operand(dst), imm);  // /6 is the XOR opcode extension.
}
1482
// Bitwise XOR of a 64-bit memory operand into 'dst': xorq dst, [src].
void X86_64Assembler::xorq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x33);  // XOR r64, r/m64 (with REX.W).
  EmitOperand(dst.LowBits(), src);
}
1489
1490
Ian Rogersdd7624d2014-03-14 17:43:00 -07001491#if 0
1492void X86_64Assembler::rex(bool force, bool w, Register* r, Register* x, Register* b) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001493 // REX.WRXB
1494 // W - 64-bit operand
1495 // R - MODRM.reg
1496 // X - SIB.index
1497 // B - MODRM.rm/SIB.base
Ian Rogersdd7624d2014-03-14 17:43:00 -07001498 uint8_t rex = force ? 0x40 : 0;
1499 if (w) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001500 rex |= 0x48; // REX.W000
1501 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001502 if (r != nullptr && *r >= Register::R8 && *r < Register::kNumberOfCpuRegisters) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001503 rex |= 0x44; // REX.0R00
Ian Rogersdd7624d2014-03-14 17:43:00 -07001504 *r = static_cast<Register>(*r - 8);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001505 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001506 if (x != nullptr && *x >= Register::R8 && *x < Register::kNumberOfCpuRegisters) {
1507 rex |= 0x42; // REX.00X0
1508 *x = static_cast<Register>(*x - 8);
1509 }
1510 if (b != nullptr && *b >= Register::R8 && *b < Register::kNumberOfCpuRegisters) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001511 rex |= 0x41; // REX.000B
Ian Rogersdd7624d2014-03-14 17:43:00 -07001512 *b = static_cast<Register>(*b - 8);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001513 }
1514 if (rex != 0) {
1515 EmitUint8(rex);
1516 }
1517}
1518
Ian Rogersdd7624d2014-03-14 17:43:00 -07001519void X86_64Assembler::rex_reg_mem(bool force, bool w, Register* dst, const Address& mem) {
1520 // REX.WRXB
1521 // W - 64-bit operand
1522 // R - MODRM.reg
1523 // X - SIB.index
1524 // B - MODRM.rm/SIB.base
1525 uint8_t rex = mem->rex();
1526 if (force) {
1527 rex |= 0x40; // REX.0000
1528 }
1529 if (w) {
1530 rex |= 0x48; // REX.W000
1531 }
1532 if (dst != nullptr && *dst >= Register::R8 && *dst < Register::kNumberOfCpuRegisters) {
1533 rex |= 0x44; // REX.0R00
1534 *dst = static_cast<Register>(*dst - 8);
1535 }
1536 if (rex != 0) {
1537 EmitUint8(rex);
1538 }
1539}
1540
1541void rex_mem_reg(bool force, bool w, Address* mem, Register* src);
1542#endif
1543
// Add an immediate to a 32-bit register: addl reg, imm.
void X86_64Assembler::addl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(0, Operand(reg), imm);  // /0 is the ADD opcode extension.
}
1549
1550
// Add a (sign-extended) 32-bit immediate to a 64-bit register: addq reg, imm.
void X86_64Assembler::addq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // addq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(0, Operand(reg), imm);  // /0 is the ADD opcode extension.
}
1557
1558
// Add a 64-bit memory operand to 'dst': addq dst, [address].
void X86_64Assembler::addq(CpuRegister dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, address);
  EmitUint8(0x03);  // ADD r64, r/m64 (with REX.W).
  EmitOperand(dst.LowBits(), address);
}
1565
1566
// Add two 64-bit registers: addq dst, src.
void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x01 is addq r/m64 <- r/m64 + r64, with op1 in r/m and op2 in reg: so reverse EmitRex64
  EmitRex64(src, dst);
  EmitUint8(0x01);  // ADD r/m64, r64.
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1574
1575
// Add a 32-bit register into memory: addl [address], reg.
void X86_64Assembler::addl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x01);  // ADD r/m32, r32.
  EmitOperand(reg.LowBits(), address);
}
1582
1583
// Add an immediate into a 32-bit memory operand: addl [address], imm.
void X86_64Assembler::addl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(0, address, imm);  // /0 is the ADD opcode extension.
}
1589
1590
// Subtract two 32-bit registers: subl dst, src.
void X86_64Assembler::subl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x2B);  // SUB r32, r/m32.
  EmitOperand(dst.LowBits(), Operand(src));
}
1597
1598
// Subtract an immediate from a 32-bit register: subl reg, imm.
void X86_64Assembler::subl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(5, Operand(reg), imm);  // /5 is the SUB opcode extension.
}
1604
1605
// Subtract a (sign-extended) 32-bit immediate from a 64-bit register.
void X86_64Assembler::subq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // subq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(5, Operand(reg), imm);  // /5 is the SUB opcode extension.
}
1612
1613
// Subtract two 64-bit registers: subq dst, src.
void X86_64Assembler::subq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x2B);  // SUB r64, r/m64 (with REX.W).
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
1620
1621
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001622void X86_64Assembler::subq(CpuRegister reg, const Address& address) {
1623 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
Mark Mendell7fd8b592015-04-22 10:46:07 -04001624 EmitRex64(reg, address);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001625 EmitUint8(0x2B);
1626 EmitOperand(reg.LowBits() & 7, address);
1627}
1628
1629
// Subtract a 32-bit memory operand from 'reg': subl reg, [address].
void X86_64Assembler::subl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x2B);  // SUB r32, r/m32.
  EmitOperand(reg.LowBits(), address);
}
1636
1637
// Sign-extend EAX into EDX:EAX (used before 32-bit idiv).
void X86_64Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);  // CDQ.
}
1642
1643
// Sign-extend RAX into RDX:RAX (used before 64-bit idiv).
void X86_64Assembler::cqo() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64();  // REX.W turns CDQ into CQO.
  EmitUint8(0x99);
}
1649
1650
// Signed 32-bit divide of EDX:EAX by 'reg'; quotient in EAX, remainder in EDX.
void X86_64Assembler::idivl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg.LowBits());  // ModRM mod=11, /7 (IDIV).
}
1657
1658
// Signed 64-bit divide of RDX:RAX by 'reg'; quotient in RAX, remainder in RDX.
void X86_64Assembler::idivq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg.LowBits());  // ModRM mod=11, /7 (IDIV).
}
1665
1666
// Two-operand signed 32-bit multiply: imull dst, src (dst *= src).
void X86_64Assembler::imull(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);  // Two-byte opcode 0F AF: IMUL r32, r/m32.
  EmitUint8(0xAF);
  EmitOperand(dst.LowBits(), Operand(src));
}
1674
// Signed 32-bit multiply of 'reg' by an immediate: imull reg, reg, imm.
// Picks the short imm8 encoding (6B) when the immediate fits in a
// sign-extended byte, otherwise the full imm32 encoding (69).
void X86_64Assembler::imull(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // imull only supports 32b immediate.

  // 'reg' is both the destination (ModRM.reg) and the source (ModRM.rm).
  EmitOptionalRex32(reg, reg);

  // See whether imm can be represented as a sign-extended 8bit value.
  int32_t v32 = static_cast<int32_t>(imm.value());
  if (IsInt<8>(v32)) {
    // Sign-extension works.
    EmitUint8(0x6B);  // IMUL r32, r/m32, imm8.
    EmitOperand(reg.LowBits(), Operand(reg));
    EmitUint8(static_cast<uint8_t>(v32 & 0xFF));
  } else {
    // Not representable, use full immediate.
    EmitUint8(0x69);  // IMUL r32, r/m32, imm32.
    EmitOperand(reg.LowBits(), Operand(reg));
    EmitImmediate(imm);
  }
}
1695
1696
// Two-operand signed 32-bit multiply with a memory source: imull reg, [address].
void X86_64Assembler::imull(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0F);  // Two-byte opcode 0F AF: IMUL r32, r/m32.
  EmitUint8(0xAF);
  EmitOperand(reg.LowBits(), address);
}
1704
1705
// Two-operand signed 64-bit multiply: imulq dst, src (dst *= src).
void X86_64Assembler::imulq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0F);  // Two-byte opcode 0F AF: IMUL r64, r/m64 (with REX.W).
  EmitUint8(0xAF);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
1713
1714
// Signed 64-bit multiply of 'reg' by an immediate, in place.
// Delegates to the three-operand form with dst == src.
void X86_64Assembler::imulq(CpuRegister reg, const Immediate& imm) {
  imulq(reg, reg, imm);
}
1718
// Three-operand signed 64-bit multiply: dst = reg * imm.
// Picks the short imm8 encoding (6B) when the immediate fits in a
// sign-extended byte, otherwise the full imm32 encoding (69).
void X86_64Assembler::imulq(CpuRegister dst, CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // imulq only supports 32b immediate.

  EmitRex64(dst, reg);

  // See whether imm can be represented as a sign-extended 8bit value.
  int64_t v64 = imm.value();
  if (IsInt<8>(v64)) {
    // Sign-extension works.
    EmitUint8(0x6B);  // IMUL r64, r/m64, imm8.
    EmitOperand(dst.LowBits(), Operand(reg));
    EmitUint8(static_cast<uint8_t>(v64 & 0xFF));
  } else {
    // Not representable, use full immediate.
    EmitUint8(0x69);  // IMUL r64, r/m64, imm32.
    EmitOperand(dst.LowBits(), Operand(reg));
    EmitImmediate(imm);
  }
}
1739
// Two-operand signed 64-bit multiply with a memory source: imulq reg, [address].
void X86_64Assembler::imulq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x0F);  // Two-byte opcode 0F AF: IMUL r64, r/m64 (with REX.W).
  EmitUint8(0xAF);
  EmitOperand(reg.LowBits(), address);
}
1747
1748
// One-operand signed 32-bit multiply: EDX:EAX = EAX * reg.
void X86_64Assembler::imull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));  // /5 is the IMUL opcode extension.
}
1755
1756
// One-operand signed 64-bit multiply: RDX:RAX = RAX * reg.
void X86_64Assembler::imulq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));  // /5 is the IMUL opcode extension.
}
1763
1764
// One-operand signed 32-bit multiply with a memory source: EDX:EAX = EAX * [address].
void X86_64Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(5, address);  // /5 is the IMUL opcode extension.
}
1771
1772
// One-operand unsigned 32-bit multiply: EDX:EAX = EAX * reg.
void X86_64Assembler::mull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));  // /4 is the MUL opcode extension.
}
1779
1780
// One-operand unsigned 32-bit multiply with a memory source: EDX:EAX = EAX * [address].
void X86_64Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(4, address);  // /4 is the MUL opcode extension.
}
1787
1788
// Shift a 32-bit register left by an immediate count.
void X86_64Assembler::shll(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 4, reg, imm);  // /4 = SHL, narrow (32-bit).
}
1792
1793
// Shift a 64-bit register left by an immediate count.
void X86_64Assembler::shlq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 4, reg, imm);  // /4 = SHL, wide (64-bit).
}
1797
1798
// Shift a 32-bit register left by the count in CL.
void X86_64Assembler::shll(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 4, operand, shifter);  // /4 = SHL, narrow (32-bit).
}
1802
1803
// Shift a 64-bit register left by the count in CL.
void X86_64Assembler::shlq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 4, operand, shifter);  // /4 = SHL, wide (64-bit).
}
1807
1808
// Logical right shift of a 32-bit register by an immediate count.
void X86_64Assembler::shrl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 5, reg, imm);  // /5 = SHR, narrow (32-bit).
}
1812
1813
// Logical right shift of a 64-bit register by an immediate count.
void X86_64Assembler::shrq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 5, reg, imm);  // /5 = SHR, wide (64-bit).
}
1817
1818
// Logical right shift of a 32-bit register by the count in CL.
void X86_64Assembler::shrl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 5, operand, shifter);  // /5 = SHR, narrow (32-bit).
}
1822
1823
// Logical right shift of a 64-bit register by the count in CL.
void X86_64Assembler::shrq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 5, operand, shifter);  // /5 = SHR, wide (64-bit).
}
1827
1828
// Arithmetic right shift of a 32-bit register by an immediate count.
void X86_64Assembler::sarl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 7, reg, imm);  // /7 = SAR, narrow (32-bit).
}
1832
1833
// Arithmetic right shift of a 32-bit register by the count in CL.
void X86_64Assembler::sarl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 7, operand, shifter);  // /7 = SAR, narrow (32-bit).
}
1837
1838
// Arithmetic right shift of a 64-bit register by an immediate count.
void X86_64Assembler::sarq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 7, reg, imm);  // /7 = SAR, wide (64-bit).
}
1842
1843
// Arithmetic right shift of a 64-bit register by the count in CL.
void X86_64Assembler::sarq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 7, operand, shifter);  // /7 = SAR, wide (64-bit).
}
1847
1848
// Two's-complement negate of a 32-bit register: negl reg.
void X86_64Assembler::negl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));  // /3 is the NEG opcode extension.
}
1855
Roland Levillain70566432014-10-24 16:20:17 +01001856
// Two's-complement negate of a 64-bit register: negq reg.
void X86_64Assembler::negq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));  // /3 is the NEG opcode extension.
}
1863
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001864
// Bitwise complement of a 32-bit register: notl reg.
void X86_64Assembler::notl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg.LowBits());  // ModRM mod=11, /2 (NOT).
}
1871
1872
// Bitwise complement of a 64-bit register: notq reg.
void X86_64Assembler::notq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(2, Operand(reg));  // /2 is the NOT opcode extension.
}
1879
1880
// ENTER imm16, 0: create a stack frame of 'imm' bytes (nesting level 0).
void X86_64Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);  // ENTER imm16, imm8.
  CHECK(imm.is_uint16()) << imm.value();
  EmitUint8(imm.value() & 0xFF);          // Frame size, low byte.
  EmitUint8((imm.value() >> 8) & 0xFF);   // Frame size, high byte.
  EmitUint8(0x00);                        // Nesting level 0.
}
1889
1890
// LEAVE: tear down the current stack frame (mov rsp, rbp; pop rbp).
void X86_64Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
1895
1896
// Near return: ret.
void X86_64Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
1901
1902
// Near return popping 'imm' extra bytes of arguments: ret imm16.
void X86_64Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);  // RET imm16.
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);         // Pop count, low byte.
  EmitUint8((imm.value() >> 8) & 0xFF);  // Pop count, high byte.
}
1910
1911
1912
// One-byte no-op.
void X86_64Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}
1917
1918
// Breakpoint trap.
void X86_64Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
1923
1924
// Halt the processor (privileged; faults in user mode).
void X86_64Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
1929
1930
// Conditional jump to 'label'. For a bound (backward) label, picks the short
// rel8 form when the displacement fits, otherwise the long rel32 form. For an
// unbound (forward) label, always emits the long form and links the label so
// Bind() can backpatch the 32-bit displacement later.
void X86_64Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // Size of 7x rel8.
    static const int kLongSize = 6;   // Size of 0F 8x rel32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0x70 + condition);  // Jcc rel8.
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);  // Jcc rel32.
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0x0F);  // Jcc rel32, displacement patched at Bind().
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
1952
1953
// Indirect jump through a register: jmp reg.
void X86_64Assembler::jmp(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg.LowBits());  // /4 is the JMP opcode extension.
}
1960
// Indirect jump through memory: jmp [address].
void X86_64Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(4, address);  // /4 is the JMP opcode extension.
}
1967
// Unconditional jump to 'label'. For a bound (backward) label, picks the
// short rel8 form when the displacement fits, else the rel32 form. For an
// unbound (forward) label, emits the rel32 form and links the label for
// backpatching at Bind().
void X86_64Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // Size of EB rel8.
    static const int kLongSize = 5;   // Size of E9 rel32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0xEB);  // JMP rel8.
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);  // JMP rel32.
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0xE9);  // JMP rel32, displacement patched at Bind().
    EmitLabelLink(label);
  }
}
1987
1988
// Emit the LOCK prefix; returns 'this' so the locked instruction can be
// chained, e.g. lock()->cmpxchgl(...).
X86_64Assembler* X86_64Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}
1994
1995
// 32-bit compare-and-exchange: cmpxchgl [address], reg (compares with EAX).
void X86_64Assembler::cmpxchgl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0F);  // Two-byte opcode 0F B1: CMPXCHG r/m32, r32.
  EmitUint8(0xB1);
  EmitOperand(reg.LowBits(), address);
}
2003
Mark Mendell58d25fd2015-04-03 14:52:31 -04002004
// 64-bit compare-and-exchange: cmpxchgq [address], reg (compares with RAX).
void X86_64Assembler::cmpxchgq(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x0F);  // Two-byte opcode 0F B1: CMPXCHG r/m64, r64 (with REX.W).
  EmitUint8(0xB1);
  EmitOperand(reg.LowBits(), address);
}
2012
2013
// Full memory fence: mfence (0F AE F0).
void X86_64Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}
2020
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002021
// Emit the GS segment-override prefix; returns 'this' for chaining with the
// prefixed instruction.
X86_64Assembler* X86_64Assembler::gs() {
  // TODO: gs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x65);
  return this;
}
2028
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002029
Ian Rogersdd7624d2014-03-14 17:43:00 -07002030void X86_64Assembler::AddImmediate(CpuRegister reg, const Immediate& imm) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002031 int value = imm.value();
Ian Rogersdd7624d2014-03-14 17:43:00 -07002032 if (value != 0) {
2033 if (value > 0) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002034 addl(reg, imm);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002035 } else {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002036 subl(reg, Immediate(value));
2037 }
2038 }
2039}
2040
2041
// Set the low byte of 'dst' to 1 if 'condition' holds, else 0: setcc dst8.
void X86_64Assembler::setcc(Condition condition, CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // RSP, RBP, RDI, RSI need rex prefix (else the pattern encodes ah/bh/ch/dh).
  if (dst.NeedsRex() || dst.AsRegister() > 3) {
    EmitOptionalRex(true, false, false, false, dst.NeedsRex());
  }
  EmitUint8(0x0F);  // Two-byte opcode 0F 9x: SETcc r/m8.
  EmitUint8(0x90 + condition);
  EmitUint8(0xC0 + dst.LowBits());  // ModRM mod=11, rm=dst.
}
2052
// Byte-swap a 32-bit register: bswap dst (0F C8+rd).
void X86_64Assembler::bswapl(CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, false, false, false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0xC8 + dst.LowBits());
}
2059
// Byte-swap a 64-bit register: bswap dst with REX.W (0F C8+rd).
void X86_64Assembler::bswapq(CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, true, false, false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0xC8 + dst.LowBits());
}
2066
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002067
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002068void X86_64Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
2069 // TODO: Need to have a code constants table.
2070 int64_t constant = bit_cast<int64_t, double>(value);
2071 pushq(Immediate(High32Bits(constant)));
2072 pushq(Immediate(Low32Bits(constant)));
Ian Rogersdd7624d2014-03-14 17:43:00 -07002073 movsd(dst, Address(CpuRegister(RSP), 0));
Ian Rogers13735952014-10-08 12:43:28 -07002074 addq(CpuRegister(RSP), Immediate(2 * sizeof(intptr_t)));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002075}
2076
2077
// Pads the buffer with nops until (offset + position) is a multiple of
// 'alignment' (which must be a power of two).
void X86_64Assembler::Align(int alignment, int offset) {
  CHECK(IsPowerOfTwo(alignment));
  // Emit nop instruction until the real position is aligned.
  while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
    nop();
  }
}
2085
2086
// Binds 'label' to the current buffer position and backpatches every linked
// forward reference. Each link site stores the position of the next link, so
// the chain is walked by loading each 32-bit slot before overwriting it with
// the final pc-relative displacement.
void X86_64Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);
    // Displacement is relative to the end of the 4-byte slot.
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  label->BindTo(bound);
}
2098
2099
// Emits a pre-encoded operand (ModRM byte, optional SIB, displacement),
// patching the given register number / opcode extension into the ModRM.reg
// field, and registers any pending fixup attached to the operand.
void X86_64Assembler::EmitOperand(uint8_t reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);  // Only the low 3 bits fit in ModRM.reg.
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);  // The reg field must be vacant.
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
  AssemblerFixup* fixup = operand.GetFixup();
  if (fixup != nullptr) {
    EmitFixup(fixup);
  }
}
2117
2118
// Emits an immediate as 4 bytes when it fits in int32, else as 8 bytes.
void X86_64Assembler::EmitImmediate(const Immediate& imm) {
  if (imm.is_int32()) {
    EmitInt32(static_cast<int32_t>(imm.value()));
  } else {
    EmitInt64(imm.value());
  }
}
2126
2127
// Emits a group-1 ALU instruction (ADD/OR/.../CMP selected by
// 'reg_or_opcode', the /digit opcode extension) with an immediate operand,
// choosing the shortest of the imm8 (83), accumulator-short (05+op<<3), and
// imm32 (81) encodings.
void X86_64Assembler::EmitComplex(uint8_t reg_or_opcode,
                                  const Operand& operand,
                                  const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(CpuRegister(RAX))) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
2148
2149
// Emits the 32-bit pc-relative displacement for 'label': the real offset when
// the label is already bound, otherwise a link for later backpatching.
// 'instruction_size' is the total size of the referencing instruction, since
// the displacement is relative to the instruction's end.
void X86_64Assembler::EmitLabel(Label* label, int instruction_size) {
  if (label->IsBound()) {
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}
2159
2160
// Links an unbound label: emits the previous head of the label's link chain
// into the 32-bit displacement slot and makes this slot the new head, so
// Bind() can later walk and patch the chain.
void X86_64Assembler::EmitLabelLink(Label* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}
2167
2168
// Emits a shift/rotate of 'reg' by an immediate count. 'wide' selects the
// 64-bit form (REX.W); 'reg_or_opcode' is the /digit opcode extension that
// selects the operation (e.g. /4 SHL, /5 SHR, /7 SAR). Uses the dedicated
// shift-by-1 opcode (D1) when the count is 1, else the imm8 form (C1).
void X86_64Assembler::EmitGenericShift(bool wide,
                                       int reg_or_opcode,
                                       CpuRegister reg,
                                       const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());
  if (wide) {
    EmitRex64(reg);
  } else {
    EmitOptionalRex32(reg);
  }
  if (imm.value() == 1) {
    EmitUint8(0xD1);  // Shift r/m by 1.
    EmitOperand(reg_or_opcode, Operand(reg));
  } else {
    EmitUint8(0xC1);  // Shift r/m by imm8.
    EmitOperand(reg_or_opcode, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}
2189
2190
// Emits a shift/rotate of 'operand' by the count in CL (the only register
// the hardware accepts as a variable shift count). 'wide' selects the 64-bit
// form (REX.W); 'reg_or_opcode' is the /digit opcode extension.
void X86_64Assembler::EmitGenericShift(bool wide,
                                       int reg_or_opcode,
                                       CpuRegister operand,
                                       CpuRegister shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter.AsRegister(), RCX);  // Variable shifts must use CL.
  if (wide) {
    EmitRex64(operand);
  } else {
    EmitOptionalRex32(operand);
  }
  EmitUint8(0xD3);  // Shift r/m by CL.
  EmitOperand(reg_or_opcode, Operand(operand));
}
2205
// Assembles a REX prefix from its four flag bits and emits it only when at
// least one bit (or 'force') is set. 'force' emits a bare 0x40 prefix, needed
// e.g. to address SPL/BPL/SIL/DIL instead of AH/CH/DH/BH.
void X86_64Assembler::EmitOptionalRex(bool force, bool w, bool r, bool x, bool b) {
  // REX.WRXB
  // W - 64-bit operand
  // R - MODRM.reg
  // X - SIB.index
  // B - MODRM.rm/SIB.base
  uint8_t rex = force ? 0x40 : 0;
  if (w) {
    rex |= 0x48;  // REX.W000
  }
  if (r) {
    rex |= 0x44;  // REX.0R00
  }
  if (x) {
    rex |= 0x42;  // REX.00X0
  }
  if (b) {
    rex |= 0x41;  // REX.000B
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2229
// Emits a REX prefix for a 32-bit-operand instruction only when |reg| is an
// extended register (encoded via REX.B).
void X86_64Assembler::EmitOptionalRex32(CpuRegister reg) {
  EmitOptionalRex(false, false, false, false, reg.NeedsRex());
}
2233
// Emits a REX prefix for a 32-bit reg-reg instruction: REX.R extends the
// ModRM.reg operand (|dst|), REX.B extends the ModRM.rm operand (|src|).
void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, CpuRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}
2237
// XMM-XMM variant: REX.R for |dst| (ModRM.reg), REX.B for |src| (ModRM.rm).
void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, XmmRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}
2241
// GPR-destination / XMM-source variant: REX.R for |dst|, REX.B for |src|.
void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, XmmRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}
2245
// XMM-destination / GPR-source variant: REX.R for |dst|, REX.B for |src|.
void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, CpuRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}
2249
// Emits the REX prefix already computed by the Operand (covers REX.X/REX.B
// for extended index/base registers), if any bits are set.
void X86_64Assembler::EmitOptionalRex32(const Operand& operand) {
  uint8_t rex = operand.rex();
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2256
// Combines the Operand's precomputed REX bits with REX.R for the |dst|
// register operand; emits the prefix only when some bit is set.
void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2266
// XMM-destination variant of the register/memory form: Operand REX bits plus
// REX.R for |dst|; emits only when non-zero.
void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2276
// Emits a bare REX.W prefix (64-bit operand size, no extended registers).
void X86_64Assembler::EmitRex64() {
  EmitOptionalRex(false, true, false, false, false);
}
2280
// Emits REX.W, plus REX.B when |reg| is an extended register.
void X86_64Assembler::EmitRex64(CpuRegister reg) {
  EmitOptionalRex(false, true, false, false, reg.NeedsRex());
}
Vladimir Kostyukovfba52f12014-04-15 15:41:47 +07002284
// Emits REX.W merged with the Operand's precomputed REX bits.  Always emits:
// the W bit guarantees a non-zero prefix.
void X86_64Assembler::EmitRex64(const Operand& operand) {
  uint8_t rex = operand.rex();
  rex |= 0x48;  // REX.W000
  EmitUint8(rex);
}
2290
// REX.W with REX.R for |dst| (ModRM.reg) and REX.B for |src| (ModRM.rm).
void X86_64Assembler::EmitRex64(CpuRegister dst, CpuRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}
2294
// REX.W variant for an XMM destination and GPR source (e.g. 64-bit cvt/mov).
void X86_64Assembler::EmitRex64(XmmRegister dst, CpuRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}
2298
// REX.W variant for a GPR destination and XMM source (e.g. 64-bit cvt/mov).
void X86_64Assembler::EmitRex64(CpuRegister dst, XmmRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}
2302
// REX.W merged with the Operand's REX bits plus REX.R for |dst|.
// Always emits (W is set unconditionally).
void X86_64Assembler::EmitRex64(CpuRegister dst, const Operand& operand) {
  uint8_t rex = 0x48 | operand.rex();  // REX.W000
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  EmitUint8(rex);
}
2310
// XMM-destination variant of the REX.W register/memory form.  Always emits.
void X86_64Assembler::EmitRex64(XmmRegister dst, const Operand& operand) {
  uint8_t rex = 0x48 | operand.rex();  // REX.W000
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  EmitUint8(rex);
}
2318
// REX handling for byte-register instructions.  Without a REX prefix, ModRM
// encodings 4-7 select AH/CH/DH/BH; forcing an (otherwise empty) REX prefix
// makes them select SPL/BPL/SIL/DIL instead.
void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, CpuRegister src) {
  // For src, SPL, BPL, SIL, DIL need the rex prefix.
  bool force = src.AsRegister() > 3;
  EmitOptionalRex(force, false, dst.NeedsRex(), false, src.NeedsRex());
}
2324
// Register/memory variant of the byte-register REX normalization: merges the
// Operand's REX bits, forces a bare REX when |dst| would otherwise encode an
// AH/CH/DH/BH-style register, and adds REX.R when |dst| is extended.
void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  // For dst, SPL, BPL, SIL, DIL need the rex prefix.
  bool force = dst.AsRegister() > 3;
  if (force) {
    rex |= 0x40;  // REX.0000
  }
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2339
// Maps a core (GPR) register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
// Maps a floating-point (XMM) register to its DWARF register number.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
2346
// Size of one stack slot / spilled GPR on x86-64 (pointer-sized, 8 bytes).
constexpr size_t kFramePointerSize = 8;
2348
// Emits the managed-code method prologue:
//   1. pushes callee-save GPRs from |spill_regs| (reverse order),
//   2. extends the stack to |frame_size| (accounting for the pushes and the
//      return address already on the stack),
//   3. spills callee-save XMM registers into the newly reserved area,
//   4. stores |method_reg| (as a 32-bit StackReference) at [RSP],
//   5. writes |entry_spills| (incoming arguments) into the caller's frame.
// CFI state is updated alongside every stack-pointer change so unwinding
// works at any point in the prologue.
void X86_64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                 const std::vector<ManagedRegister>& spill_regs,
                                 const ManagedRegisterEntrySpills& entry_spills) {
  DCHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
  cfi_.SetCurrentCFAOffset(8);  // Return address on stack.
  CHECK_ALIGNED(frame_size, kStackAlignment);
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      pushq(spill.AsCpuRegister());
      gpr_count++;
      cfi_.AdjustCFAOffset(kFramePointerSize);
      cfi_.RelOffset(DWARFReg(spill.AsCpuRegister().AsRegister()), 0);
    }
  }
  // return address then method on stack.
  // Remaining stack to reserve: total frame minus what the pushes and the
  // return address already consumed.
  int64_t rest_of_frame = static_cast<int64_t>(frame_size)
                          - (gpr_count * kFramePointerSize)
                          - kFramePointerSize /*return address*/;
  subq(CpuRegister(RSP), Immediate(rest_of_frame));
  cfi_.AdjustCFAOffset(rest_of_frame);

  // spill xmms: placed at the top of the reserved area, growing downwards.
  int64_t offset = rest_of_frame;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset -= sizeof(double);
      movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister());
      cfi_.RelOffset(DWARFReg(spill.AsXmmRegister().AsFloatRegister()), offset);
    }
  }

  // Method references on the stack are 4-byte compressed StackReferences.
  DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));

  movl(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister());

  // Write incoming register arguments into their home slots in the caller's
  // frame (offsets are relative to the top of this frame).
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ManagedRegisterSpill spill = entry_spills.at(i);
    if (spill.AsX86_64().IsCpuRegister()) {
      if (spill.getSize() == 8) {
        movq(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
             spill.AsX86_64().AsCpuRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movl(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsCpuRegister());
      }
    } else {
      if (spill.getSize() == 8) {
        movsd(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsXmmRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movss(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsXmmRegister());
      }
    }
  }
}
2407
// Emits the method epilogue, mirroring BuildFrame: reloads spilled XMM
// registers, releases the frame, pops callee-save GPRs and returns.  CFI
// state is snapshotted and restored so code emitted after the epilogue (in
// other blocks) still unwinds with the full-frame description.
void X86_64Assembler::RemoveFrame(size_t frame_size,
                                  const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();
  int gpr_count = 0;
  // unspill xmms: walk upwards from the bottom of the XMM save area, which
  // BuildFrame placed just below the GPR pushes.
  int64_t offset = static_cast<int64_t>(frame_size) - (spill_regs.size() * kFramePointerSize) - 2 * kFramePointerSize;
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset += sizeof(double);
      movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset));
      cfi_.Restore(DWARFReg(spill.AsXmmRegister().AsFloatRegister()));
    } else {
      gpr_count++;
    }
  }
  // Release everything except the pushed GPRs and the return address.
  int adjust = static_cast<int>(frame_size) - (gpr_count * kFramePointerSize) - kFramePointerSize;
  addq(CpuRegister(RSP), Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      popq(spill.AsCpuRegister());
      cfi_.AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
      cfi_.Restore(DWARFReg(spill.AsCpuRegister().AsRegister()));
    }
  }
  ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}
2441
// Grows the current frame by |adjust| bytes (stack grows downwards, hence
// adding a negative immediate) and records the CFA change.
void X86_64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addq(CpuRegister(RSP), Immediate(-static_cast<int64_t>(adjust)));
  cfi_.AdjustCFAOffset(adjust);
}
2447
// Shrinks the current frame by |adjust| bytes and records the CFA change.
void X86_64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addq(CpuRegister(RSP), Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
}
2453
// Stores |msrc| into the stack slot at |offs|, choosing the instruction by
// the register kind and |size| (4 or 8 bytes for GPR/XMM, x87 via fstp).
void X86_64Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);  // No register: nothing to store.
  } else if (src.IsCpuRegister()) {
    if (size == 4) {
      CHECK_EQ(4u, size);
      movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    } else {
      CHECK_EQ(8u, size);
      movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    }
  } else if (src.IsRegisterPair()) {
    // NOTE(review): size is checked to be 0 here yet both pair halves are
    // stored with 8-byte movq only 4 bytes apart — this looks inherited from
    // the x86 assembler; confirm this path is unreachable on x86-64.
    CHECK_EQ(0u, size);
    movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());
    movq(Address(CpuRegister(RSP), FrameOffset(offs.Int32Value()+4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    // x87 stores pop ST0.
    if (size == 4) {
      fstps(Address(CpuRegister(RSP), offs));
    } else {
      fstpl(Address(CpuRegister(RSP), offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    } else {
      movsd(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    }
  }
}
2486
// Stores a heap reference (32-bit compressed on x86-64) into a stack slot.
void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}
2492
// Stores a full 64-bit raw pointer into a stack slot.
void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}
2498
// Stores a 32-bit immediate into a stack slot; the scratch register is
// unused on x86 since the immediate can be encoded directly.
void X86_64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                            ManagedRegister) {
  movl(Address(CpuRegister(RSP), dest), Immediate(imm));  // TODO(64) movq?
}
2503
// Stores a 32-bit immediate into a Thread field, addressed via the GS
// segment (which holds the current Thread on x86-64).
void X86_64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                               ManagedRegister) {
  gs()->movl(Address::Absolute(dest, true), Immediate(imm));  // TODO(64) movq?
}
2508
// Computes the absolute address of stack slot |fr_offs| into |mscratch| and
// stores it into the Thread field at |thr_offs| (GS-relative).
void X86_64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                                 FrameOffset fr_offs,
                                                 ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), fr_offs));
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}
2517
// Saves the current RSP into a Thread field (GS-relative).
void X86_64Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  gs()->movq(Address::Absolute(thr_offs, true), CpuRegister(RSP));
}
2521
// Not needed on x86-64; only ARM requires spanning stores.
void X86_64Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                    FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}
2526
// Loads |size| bytes from stack slot |src| into |mdest|; the inverse of
// Store() above, with the same per-register-kind instruction selection.
void X86_64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 4) {
      CHECK_EQ(4u, size);
      movl(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    } else {
      CHECK_EQ(8u, size);
      movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    }
  } else if (dest.IsRegisterPair()) {
    // NOTE(review): as in Store(), the register-pair path (size checked to 0,
    // overlapping 8-byte loads 4 bytes apart) looks inherited from x86;
    // confirm it is unreachable on x86-64.
    CHECK_EQ(0u, size);
    movq(dest.AsRegisterPairLow(), Address(CpuRegister(RSP), src));
    movq(dest.AsRegisterPairHigh(), Address(CpuRegister(RSP), FrameOffset(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    // x87 loads push onto the FP stack.
    if (size == 4) {
      flds(Address(CpuRegister(RSP), src));
    } else {
      fldl(Address(CpuRegister(RSP), src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    } else {
      movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    }
  }
}
2558
// Loads |size| bytes from a Thread field (GS-relative absolute address)
// into |mdest|.
void X86_64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    gs()->movl(dest.AsCpuRegister(), Address::Absolute(src, true));
  } else if (dest.IsRegisterPair()) {
    // NOTE(review): only the low half of the pair is loaded here — presumably
    // a single 64-bit value is expected; confirm this path is unused.
    CHECK_EQ(8u, size);
    gs()->movq(dest.AsRegisterPairLow(), Address::Absolute(src, true));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      gs()->flds(Address::Absolute(src, true));
    } else {
      gs()->fldl(Address::Absolute(src, true));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      gs()->movss(dest.AsXmmRegister(), Address::Absolute(src, true));
    } else {
      gs()->movsd(dest.AsXmmRegister(), Address::Absolute(src, true));
    }
  }
}
2584
// Loads a reference-sized slot from the stack into a GPR.
// NOTE(review): uses a 64-bit movq while StoreRef uses 32-bit movl —
// presumably this slot holds a HandleScope entry address rather than a
// compressed reference; confirm against callers.
void X86_64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
}
2590
2591void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
2592 MemberOffset offs) {
2593 X86_64ManagedRegister dest = mdest.AsX86_64();
2594 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
Hiroshi Yamauchif8892672014-09-30 10:56:14 -07002595 movl(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
Hiroshi Yamauchib88f0b12014-09-26 14:55:38 -07002596 if (kPoisonHeapReferences) {
2597 negl(dest.AsCpuRegister());
2598 }
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002599}
2600
2601void X86_64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
2602 Offset offs) {
2603 X86_64ManagedRegister dest = mdest.AsX86_64();
2604 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
2605 movq(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
2606}
2607
// Loads a 64-bit raw pointer from a Thread field (GS-relative) into |mdest|.
void X86_64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest, ThreadOffset<8> offs) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  gs()->movq(dest.AsCpuRegister(), Address::Absolute(offs, true));
}
2613
2614void X86_64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
2615 X86_64ManagedRegister reg = mreg.AsX86_64();
2616 CHECK(size == 1 || size == 2) << size;
2617 CHECK(reg.IsCpuRegister()) << reg;
2618 if (size == 1) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002619 movsxb(reg.AsCpuRegister(), reg.AsCpuRegister());
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002620 } else {
2621 movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
2622 }
2623}
2624
2625void X86_64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
2626 X86_64ManagedRegister reg = mreg.AsX86_64();
2627 CHECK(size == 1 || size == 2) << size;
2628 CHECK(reg.IsCpuRegister()) << reg;
2629 if (size == 1) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002630 movzxb(reg.AsCpuRegister(), reg.AsCpuRegister());
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002631 } else {
2632 movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
2633 }
2634}
2635
2636void X86_64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
2637 X86_64ManagedRegister dest = mdest.AsX86_64();
2638 X86_64ManagedRegister src = msrc.AsX86_64();
2639 if (!dest.Equals(src)) {
2640 if (dest.IsCpuRegister() && src.IsCpuRegister()) {
2641 movq(dest.AsCpuRegister(), src.AsCpuRegister());
2642 } else if (src.IsX87Register() && dest.IsXmmRegister()) {
2643 // Pass via stack and pop X87 register
Ian Rogersdd7624d2014-03-14 17:43:00 -07002644 subl(CpuRegister(RSP), Immediate(16));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002645 if (size == 4) {
2646 CHECK_EQ(src.AsX87Register(), ST0);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002647 fstps(Address(CpuRegister(RSP), 0));
2648 movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002649 } else {
2650 CHECK_EQ(src.AsX87Register(), ST0);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002651 fstpl(Address(CpuRegister(RSP), 0));
2652 movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002653 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07002654 addq(CpuRegister(RSP), Immediate(16));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002655 } else {
2656 // TODO: x87, SSE
2657 UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
2658 }
2659 }
2660}
2661
// Copies a 32-bit reference slot between two stack locations via |mscratch|.
void X86_64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                              ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), src));
  movl(Address(CpuRegister(RSP), dest), scratch.AsCpuRegister());
}
2669
// Copies a 64-bit pointer from a Thread field (GS-relative) into a stack
// slot via |mscratch|.
void X86_64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                             ThreadOffset<8> thr_offs,
                                             ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(thr_offs, true));
  Store(fr_offs, scratch, 8);
}
2678
// Copies a 64-bit pointer from a stack slot into a Thread field
// (GS-relative) via |mscratch|.
void X86_64Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 8);
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}
2687
// Copies |size| bytes between two stack slots via |mscratch|.  An 8-byte
// copy through a GPR is performed as two 4-byte halves.
// NOTE(review): the two-half split looks inherited from 32-bit x86; a single
// 8-byte Load/Store would also work here — confirm before changing.
void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch,
                           size_t size) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}
2702
// Register-base-to-frame copy: not needed/implemented on x86-64.
void X86_64Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                           ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
2707
// Copies 4 bytes from a stack slot to [dest_base + dest_offset] using the
// push/pop-through-stack trick, so no scratch register is needed.
void X86_64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                           ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushq(Address(CpuRegister(RSP), src));
  popq(Address(dest_base.AsX86_64().AsCpuRegister(), dest_offset));
}
2715
// Copies 4 bytes from *(*(RSP+src_base) + src_offset) into stack slot |dest|,
// using |mscratch| for the double indirection.
void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movq(scratch, Address(CpuRegister(RSP), src_base));  // Load the base pointer.
  movq(scratch, Address(scratch, src_offset));         // Load the value.
  movq(Address(CpuRegister(RSP), dest), scratch);
}
2724
// Copies 4 bytes between two register-relative memory locations with the
// push/pop trick; no scratch register required.
void X86_64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                           ManagedRegister src, Offset src_offset,
                           ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushq(Address(src.AsX86_64().AsCpuRegister(), src_offset));
  popq(Address(dest.AsX86_64().AsCpuRegister(), dest_offset));
}
2733
// Copies 4 bytes between two offsets off the same pointer stored in stack
// slot |src| (== |dest|), via |mscratch| plus the push/pop trick.
void X86_64Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());  // Same base pointer slot.
  movq(scratch, Address(CpuRegister(RSP), src));
  pushq(Address(scratch, src_offset));
  popq(Address(scratch, dest_offset));
}
2743
// Emits a full memory fence; the scratch register is unused on x86-64.
void X86_64Assembler::MemoryBarrier(ManagedRegister) {
  mfence();
}
2747
// Materializes a handle-scope entry pointer in |mout_reg|: the address of
// the handle-scope slot at |handle_scope_offset|, or zero when the reference
// is null and |null_allowed| is set (JNI passes null jobjects as NULL rather
// than as a pointer to a null entry).
void X86_64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister min_reg, bool null_allowed) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  if (in_reg.IsNoRegister()) {  // TODO(64): && null_allowed
    // Use out_reg as indicator of null.
    in_reg = out_reg;
    // TODO: movzwl
    movl(in_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      // Pre-clear the output so it is zero on the null path.
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
}
2775
// Stack-slot variant of CreateHandleScopeEntry: writes the handle-scope
// entry address (or null, when allowed and the reference is null) into the
// stack slot |out_off|, using |mscratch|.
void X86_64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister mscratch,
                                             bool null_allowed) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    // Load the reference; if it is null, store null instead of the address.
    movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  Store(out_off, scratch, 8);
}
2794
// Given a handle scope entry, load the associated reference.
// A null entry pointer yields a null reference (out_reg is pre-zeroed and
// the dereference is skipped).
void X86_64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                   ManagedRegister min_reg) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movq(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}
2811
// Reference verification is currently a no-op on x86-64.
void X86_64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
2815
// Reference verification is currently a no-op on x86-64.
void X86_64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
2819
// Emits an indirect call through [base + offset].
void X86_64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86_64ManagedRegister base = mbase.AsX86_64();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}
2826
// Loads a (32-bit compressed) method reference from the stack slot |base|
// and calls through it at |offset|, using |mscratch|.
void X86_64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  movl(scratch, Address(CpuRegister(RSP), base));
  call(Address(scratch, offset));
}
2832
// Calls through a function pointer stored in a Thread field (GS-relative).
void X86_64Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister /*mscratch*/) {
  gs()->call(Address::Absolute(offset, true));
}
2836
// Loads the current Thread* (Thread::SelfOffset via GS) into |tr|.
void X86_64Assembler::GetCurrentThread(ManagedRegister tr) {
  gs()->movq(tr.AsX86_64().AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
}
2840
// Stores the current Thread* into stack slot |offset| via |mscratch|.
void X86_64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
  movq(Address(CpuRegister(RSP), offset), scratch.AsCpuRegister());
}
2846
Ian Rogersdd7624d2014-03-14 17:43:00 -07002847// Slowpath entered when Thread::Current()->_exception is non-null
2848class X86_64ExceptionSlowPath FINAL : public SlowPath {
2849 public:
2850 explicit X86_64ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
2851 virtual void Emit(Assembler *sp_asm) OVERRIDE;
2852 private:
2853 const size_t stack_adjust_;
2854};
2855
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002856void X86_64Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002857 X86_64ExceptionSlowPath* slow = new X86_64ExceptionSlowPath(stack_adjust);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002858 buffer_.EnqueueSlowPath(slow);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002859 gs()->cmpl(Address::Absolute(Thread::ExceptionOffset<8>(), true), Immediate(0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002860 j(kNotEqual, slow->Entry());
2861}
2862
Ian Rogersdd7624d2014-03-14 17:43:00 -07002863void X86_64ExceptionSlowPath::Emit(Assembler *sasm) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002864 X86_64Assembler* sp_asm = down_cast<X86_64Assembler*>(sasm);
2865#define __ sp_asm->
2866 __ Bind(&entry_);
2867 // Note: the return value is dead
2868 if (stack_adjust_ != 0) { // Fix up the frame.
2869 __ DecreaseFrameSize(stack_adjust_);
2870 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07002871 // Pass exception as argument in RDI
2872 __ gs()->movq(CpuRegister(RDI), Address::Absolute(Thread::ExceptionOffset<8>(), true));
2873 __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(8, pDeliverException), true));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002874 // this call should never return
2875 __ int3();
2876#undef __
2877}
2878
Mark Mendellf55c3e02015-03-26 21:07:46 -04002879void X86_64Assembler::AddConstantArea() {
2880 const std::vector<int32_t>& area = constant_area_.GetBuffer();
Mark Mendell39dcf552015-04-09 20:42:42 -04002881 for (size_t i = 0, e = area.size(); i < e; i++) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002882 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2883 EmitInt32(area[i]);
2884 }
2885}
2886
2887int ConstantArea::AddInt32(int32_t v) {
Mark Mendell39dcf552015-04-09 20:42:42 -04002888 for (size_t i = 0, e = buffer_.size(); i < e; i++) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002889 if (v == buffer_[i]) {
2890 return i * elem_size_;
2891 }
2892 }
2893
2894 // Didn't match anything.
2895 int result = buffer_.size() * elem_size_;
2896 buffer_.push_back(v);
2897 return result;
2898}
2899
2900int ConstantArea::AddInt64(int64_t v) {
2901 int32_t v_low = v;
2902 int32_t v_high = v >> 32;
2903 if (buffer_.size() > 1) {
2904 // Ensure we don't pass the end of the buffer.
Mark Mendell39dcf552015-04-09 20:42:42 -04002905 for (size_t i = 0, e = buffer_.size() - 1; i < e; i++) {
2906 if (v_low == buffer_[i] && v_high == buffer_[i + 1]) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002907 return i * elem_size_;
2908 }
2909 }
2910 }
2911
2912 // Didn't match anything.
2913 int result = buffer_.size() * elem_size_;
2914 buffer_.push_back(v_low);
2915 buffer_.push_back(v_high);
2916 return result;
2917}
2918
2919int ConstantArea::AddDouble(double v) {
2920 // Treat the value as a 64-bit integer value.
2921 return AddInt64(bit_cast<int64_t, double>(v));
2922}
2923
2924int ConstantArea::AddFloat(float v) {
2925 // Treat the value as a 32-bit integer value.
2926 return AddInt32(bit_cast<int32_t, float>(v));
2927}
2928
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002929} // namespace x86_64
2930} // namespace art