blob: feceecac687f2ab3190c461ff98f473f301cba82 [file] [log] [blame]
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_x86_64.h"
18
19#include "base/casts.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "memory_region.h"
22#include "thread.h"
23
24namespace art {
25namespace x86_64 {
26
// Streams a general-purpose register in human-readable form (debugging/disassembly aid).
std::ostream& operator<<(std::ostream& os, const CpuRegister& reg) {
  return os << reg.AsRegister();
}

// Streams an XMM (SSE) register in human-readable form.
std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
  return os << reg.AsFloatRegister();
}
34
35std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
36 return os << "ST" << static_cast<int>(reg);
37}
38
// call reg: FF /2. An optional REX prefix is emitted if the register is R8-R15.
void X86_64Assembler::call(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg.LowBits());  // /2 opcode extension selects CALL.
}


// call m64: FF /2 with a memory operand.
void X86_64Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(2, address);  // /2 opcode extension selects CALL.
}


// call rel32: E8 cd. The displacement is relative to the end of this 5-byte instruction.
void X86_64Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  static const int kSize = 5;
  // Offset by one because we already have emitted the opcode.
  EmitLabel(label, kSize - 1);
}
62
// push r64: 50+rd. REX.B is emitted (via EmitOptionalRex32) for R8-R15;
// no REX.W is needed because push defaults to 64-bit operand size.
void X86_64Assembler::pushq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x50 + reg.LowBits());
}


// push m64: FF /6.
void X86_64Assembler::pushq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(6, address);  // /6 opcode extension selects PUSH.
}


// push imm: 6A ib for 8-bit immediates, 68 id for 32-bit ones (sign-extended to 64).
void X86_64Assembler::pushq(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // pushq only supports 32b immediate.
  if (imm.is_int8()) {
    EmitUint8(0x6A);                 // Short form: push imm8.
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);                 // Long form: push imm32.
    EmitImmediate(imm);
  }
}


// pop r64: 58+rd. REX.B for R8-R15; operand size defaults to 64-bit.
void X86_64Assembler::popq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x58 + reg.LowBits());
}


// pop m64: 8F /0.
void X86_64Assembler::popq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0x8F);
  EmitOperand(0, address);  // /0 opcode extension selects POP.
}
104
105
// movq r64, imm: uses the shorter sign-extending REX.W C7 /0 id form when the
// value fits in 32 bits, otherwise the full 10-byte REX.W B8+rd io form.
void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int32()) {
    // 32 bit. Note: sign-extends.
    EmitRex64(dst);
    EmitUint8(0xC7);
    EmitRegisterOperand(0, dst.LowBits());
    EmitInt32(static_cast<int32_t>(imm.value()));
  } else {
    EmitRex64(dst);
    EmitUint8(0xB8 + dst.LowBits());  // mov r64, imm64.
    EmitInt64(imm.value());
  }
}


// movl r32, imm32: B8+rd id. Writing the 32-bit register zero-extends into the
// upper half of the 64-bit register.
void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {
  CHECK(imm.is_int32());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xB8 + dst.LowBits());
  EmitImmediate(imm);
}


// movq m64, imm32: REX.W C7 /0 id (the immediate is sign-extended to 64 bits;
// there is no m64/imm64 form, hence the CHECK).
void X86_64Assembler::movq(const Address& dst, const Immediate& imm) {
  CHECK(imm.is_int32());
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
139
140
// movq r64, r64 via the MR form (89 /r): destination in r/m, source in reg.
void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x89 is movq r/m64 <- r64, with op1 in r/m and op2 in reg: so reverse EmitRex64
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}


// movl r32, r32 via the RM form (8B /r).
void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movq r64, m64: REX.W 8B /r.
void X86_64Assembler::movq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


// movl r32, m32: 8B /r (zero-extends into the upper 32 bits).
void X86_64Assembler::movl(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


// movq m64, r64: REX.W 89 /r.
void X86_64Assembler::movq(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}


// movl m32, r32: 89 /r.
void X86_64Assembler::movl(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}

// movl m32, imm32: C7 /0 id.
void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xC7);
  EmitOperand(0, dst);  // /0 opcode extension selects MOV.
  EmitImmediate(imm);
}
196
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800197
// cmovcc r64, r64: convenience overload defaulting to the 64-bit form.
void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src) {
  cmov(c, dst, src, true);
}

// cmovcc: 0F 40+cc /r. The condition code is added to the 0x40 opcode base;
// REX.W selects the 64-bit form when is64bit is true.
void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x40 + c);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
209
210
// movzx r32, r8: 0F B6 /r. The byte-register source may need a REX prefix to
// address SPL/BPL/SIL/DIL instead of AH/CH/DH/BH, hence the normalizing helper.
void X86_64Assembler::movzxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movzx r32, m8: 0F B6 /r.
void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // Byte register is only in the source register form, so we don't use
  // EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst.LowBits(), src);
}


// movsx r32, r8: 0F BE /r (sign-extending counterpart of movzxb).
void X86_64Assembler::movsxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movsx r32, m8: 0F BE /r.
void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // Byte register is only in the source register form, so we don't use
  // EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst.LowBits(), src);
}
249
250
// Byte loads must specify an extension; this overload is deliberately unsupported.
void X86_64Assembler::movb(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}


// mov m8, r8: 88 /r. The byte source register may need a normalizing REX prefix.
void X86_64Assembler::movb(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(src, dst);
  EmitUint8(0x88);
  EmitOperand(src.LowBits(), dst);
}
262
263
264void X86_64Assembler::movb(const Address& dst, const Immediate& imm) {
265 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +0100266 EmitOptionalRex32(dst);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +0700267 EmitUint8(0xC6);
Ian Rogersdd7624d2014-03-14 17:43:00 -0700268 EmitOperand(Register::RAX, dst);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +0700269 CHECK(imm.is_int8());
270 EmitUint8(imm.value() & 0xFF);
271}
272
273
// movzx r32, r16: 0F B7 /r.
void X86_64Assembler::movzxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movzx r32, m16: 0F B7 /r.
void X86_64Assembler::movzxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst.LowBits(), src);
}


// movsx r32, r16: 0F BF /r (sign-extending counterpart of movzxw).
void X86_64Assembler::movsxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movsx r32, m16: 0F BF /r.
void X86_64Assembler::movsxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst.LowBits(), src);
}


// Word loads must specify an extension; this overload is deliberately unsupported.
void X86_64Assembler::movw(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}
313
314
// mov m16, r16: 66 89 /r. The 0x66 prefix overrides the operand size to 16 bits.
void X86_64Assembler::movw(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}
322
323
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +0100324void X86_64Assembler::movw(const Address& dst, const Immediate& imm) {
325 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
326 EmitOperandSizeOverride();
327 EmitOptionalRex32(dst);
328 EmitUint8(0xC7);
329 EmitOperand(Register::RAX, dst);
Nicolas Geoffrayb6e72062014-10-07 14:54:48 +0100330 CHECK(imm.is_uint16() || imm.is_int16());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +0100331 EmitUint8(imm.value() & 0xFF);
332 EmitUint8(imm.value() >> 8);
333}
334
335
// lea r64, m: REX.W 8D /r. Computes the effective address without touching memory.
void X86_64Assembler::leaq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8D);
  EmitOperand(dst.LowBits(), src);
}


// lea r32, m: 8D /r (32-bit result, upper half zero-extended).
void X86_64Assembler::leal(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8D);
  EmitOperand(dst.LowBits(), src);
}
350
351
// movaps xmm, xmm: 0F 28 /r (aligned packed single move, register form).
void X86_64Assembler::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// movss xmm, m32: F3 0F 10 /r (scalar single-precision load).
void X86_64Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


// movss m32, xmm: F3 0F 11 /r (scalar single-precision store).
void X86_64Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}


// movss xmm, xmm: uses the store form F3 0F 11 /r, so operands are reversed.
void X86_64Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);  // Movss is MR encoding instead of the usual RM.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}
389
390
// movsxd r64, r32: REX.W 63 /r (sign-extend a 32-bit register into 64 bits).
void X86_64Assembler::movsxd(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x63);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


// movsxd r64, m32: REX.W 63 /r.
void X86_64Assembler::movsxd(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x63);
  EmitOperand(dst.LowBits(), src);
}
405
406
// movq xmm, r64: convenience overload defaulting to the 64-bit (movq) form.
void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
  movd(dst, src, true);
}

// movq r64, xmm: convenience overload defaulting to the 64-bit (movq) form.
void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
  movd(dst, src, true);
}

// movd/movq xmm, r32/r64: 66 [REX.W] 0F 6E /r. REX.W selects the 64-bit form.
void X86_64Assembler::movd(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst.LowBits(), Operand(src));
}

// movd/movq r32/r64, xmm: 66 [REX.W] 0F 7E /r. REX.W selects the 64-bit form.
void X86_64Assembler::movd(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex(false, is64bit, src.NeedsRex(), false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src.LowBits(), Operand(dst));
}
432
433
// Scalar single-precision SSE arithmetic. All use the F3 0F <op> /r encoding:
// 0x58 = add, 0x5C = sub, 0x59 = mul, 0x5E = div; each has a register-register
// and a register-memory form.

// addss xmm, xmm: F3 0F 58 /r.
void X86_64Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// addss xmm, m32: F3 0F 58 /r.
void X86_64Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


// subss xmm, xmm: F3 0F 5C /r.
void X86_64Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// subss xmm, m32: F3 0F 5C /r.
void X86_64Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


// mulss xmm, xmm: F3 0F 59 /r.
void X86_64Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// mulss xmm, m32: F3 0F 59 /r.
void X86_64Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


// divss xmm, xmm: F3 0F 5E /r.
void X86_64Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// divss xmm, m32: F3 0F 5E /r.
void X86_64Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}
512
513
// fld m32fp: D9 /0 (push a single-precision value onto the x87 stack).
void X86_64Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);  // /0 opcode extension selects FLD.
}


// fst m32fp: D9 /2 (store ST(0) without popping).
void X86_64Assembler::fsts(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(2, dst);  // /2 opcode extension selects FST.
}


// fstp m32fp: D9 /3 (store ST(0) and pop the x87 stack).
void X86_64Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);  // /3 opcode extension selects FSTP.
}
533
534
// movsd xmm, m64: F2 0F 10 /r (scalar double-precision load).
void X86_64Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


// movsd m64, xmm: F2 0F 11 /r (scalar double-precision store).
void X86_64Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}


// movsd xmm, xmm: uses the store form F2 0F 11 /r, so operands are reversed.
void X86_64Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);  // Movsd is MR encoding instead of the usual RM.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}
563
564
// Scalar double-precision SSE2 arithmetic. All use the F2 0F <op> /r encoding:
// 0x58 = add, 0x5C = sub, 0x59 = mul, 0x5E = div; each has a register-register
// and a register-memory form.

// addsd xmm, xmm: F2 0F 58 /r.
void X86_64Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// addsd xmm, m64: F2 0F 58 /r.
void X86_64Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


// subsd xmm, xmm: F2 0F 5C /r.
void X86_64Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// subsd xmm, m64: F2 0F 5C /r.
void X86_64Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


// mulsd xmm, xmm: F2 0F 59 /r.
void X86_64Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// mulsd xmm, m64: F2 0F 59 /r.
void X86_64Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


// divsd xmm, xmm: F2 0F 5E /r.
void X86_64Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


// divsd xmm, m64: F2 0F 5E /r.
void X86_64Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}
643
644
// cvtsi2ss xmm, r32: convenience overload defaulting to the 32-bit source form.
void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src) {
  cvtsi2ss(dst, src, false);
}


// cvtsi2ss xmm, r32/r64: F3 [REX.W] 0F 2A /r (signed integer to scalar single).
void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}


// cvtsi2ss xmm, m32/m64: F3 [REX.W] 0F 2A /r.
void X86_64Assembler::cvtsi2ss(XmmRegister dst, const Address& src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), src);
}
678
679
// cvtsi2sd xmm, r32 — convenience overload; defaults to a 32-bit integer source.
void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src) {
  cvtsi2sd(dst, src, false);
}
683
684
// cvtsi2sd xmm, r32/r64: F2 [REX(.W)] 0F 2A /r — convert signed integer to scalar double.
// is64bit selects the REX.W form (64-bit integer source).
void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}
698
699
// cvtsi2sd xmm, m32/m64: F2 [REX(.W)] 0F 2A /r — convert a signed integer in memory
// to scalar double. is64bit selects the REX.W (64-bit source) form.
void X86_64Assembler::cvtsi2sd(XmmRegister dst, const Address& src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), src);
}
713
714
// cvtss2si r32, xmm: F3 [REX] 0F 2D /r — convert scalar single to signed 32-bit
// integer (rounding per MXCSR). No 64-bit variant is provided here.
void X86_64Assembler::cvtss2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
723
724
// cvtss2sd xmm, xmm: F3 [REX] 0F 5A /r — widen scalar single to scalar double.
void X86_64Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
733
734
// cvtss2sd xmm, m32: F3 [REX] 0F 5A /r — widen a scalar single in memory to scalar double.
void X86_64Assembler::cvtss2sd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitOperand(dst.LowBits(), src);
}
743
744
// cvtsd2si r32, xmm: F2 [REX] 0F 2D /r — convert scalar double to signed 32-bit
// integer (rounding per MXCSR). No 64-bit variant is provided here.
void X86_64Assembler::cvtsd2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
753
754
// cvttss2si r32, xmm — convenience overload; defaults to a 32-bit integer destination.
void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src) {
  cvttss2si(dst, src, false);
}
758
759
// cvttss2si r32/r64, xmm: F3 [REX(.W)] 0F 2C /r — truncating convert of scalar
// single to signed integer. is64bit selects the REX.W (64-bit destination) form.
void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
773
774
// cvttsd2si r32, xmm — convenience overload; defaults to a 32-bit integer destination.
void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src) {
  cvttsd2si(dst, src, false);
}
778
779
// cvttsd2si r32/r64, xmm: F2 [REX(.W)] 0F 2C /r — truncating convert of scalar
// double to signed integer. is64bit selects the REX.W (64-bit destination) form.
void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src, bool is64bit) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  if (is64bit) {
    // Emit a REX.W prefix if the operand size is 64 bits.
    EmitRex64(dst, src);
  } else {
    EmitOptionalRex32(dst, src);
  }
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
793
794
// cvtsd2ss xmm, xmm: F2 [REX] 0F 5A /r — narrow scalar double to scalar single.
void X86_64Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
803
804
// cvtsd2ss xmm, m64: F2 [REX] 0F 5A /r — narrow a scalar double in memory to scalar single.
void X86_64Assembler::cvtsd2ss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitOperand(dst.LowBits(), src);
}
813
814
// cvtdq2pd xmm, xmm: F3 [REX] 0F E6 /r — convert packed 32-bit integers to packed doubles.
void X86_64Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
823
824
// comiss xmm, xmm: [REX] 0F 2F /r — ordered compare of scalar singles, sets EFLAGS.
void X86_64Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);  // No mandatory prefix for the "ss" form.
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}
832
833
// comiss xmm, m32: [REX] 0F 2F /r — ordered compare with a scalar single in memory.
void X86_64Assembler::comiss(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitOperand(a.LowBits(), b);
}
841
842
// comisd xmm, xmm: 66 [REX] 0F 2F /r — ordered compare of scalar doubles, sets EFLAGS.
void X86_64Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);  // Mandatory prefix selecting the "sd" form.
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}
851
Mark Mendell40741f32015-04-20 22:10:34 -0400852
// comisd xmm, m64: 66 [REX] 0F 2F /r — ordered compare with a scalar double in memory.
void X86_64Assembler::comisd(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitOperand(a.LowBits(), b);
}
861
862
// ucomiss xmm, xmm: [REX] 0F 2E /r — unordered compare of scalar singles, sets EFLAGS.
void X86_64Assembler::ucomiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a.LowBits(), b);
}
870
871
// ucomiss xmm, m32: [REX] 0F 2E /r — unordered compare with a scalar single in memory.
void X86_64Assembler::ucomiss(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a.LowBits(), b);
}
879
880
// ucomisd xmm, xmm: 66 [REX] 0F 2E /r — unordered compare of scalar doubles, sets EFLAGS.
void X86_64Assembler::ucomisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a.LowBits(), b);
}
889
Dmitry Petrochenkofca82202014-03-21 11:21:37 +0700890
// ucomisd xmm, m64: 66 [REX] 0F 2E /r — unordered compare with a scalar double in memory.
void X86_64Assembler::ucomisd(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a.LowBits(), b);
}
899
900
// roundsd xmm, xmm, imm8: 66 [REX] 0F 3A 0B /r ib — round scalar double; the
// immediate selects the rounding mode (SSE4.1).
void X86_64Assembler::roundsd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst.LowBits(), src);
  EmitUint8(imm.value());  // Rounding-control immediate byte.
}
911
912
// roundss xmm, xmm, imm8: 66 [REX] 0F 3A 0A /r ib — round scalar single; the
// immediate selects the rounding mode (SSE4.1).
void X86_64Assembler::roundss(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
  EmitUint8(imm.value());  // Rounding-control immediate byte.
}
923
924
// sqrtsd xmm, xmm: F2 [REX] 0F 51 /r — scalar double-precision square root.
void X86_64Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
933
934
// sqrtss xmm, xmm: F3 [REX] 0F 51 /r — scalar single-precision square root.
void X86_64Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
943
944
// xorpd xmm, m128: 66 [REX] 0F 57 /r — bitwise XOR of packed doubles with memory.
void X86_64Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}
953
954
// xorpd xmm, xmm: 66 [REX] 0F 57 /r — bitwise XOR of packed doubles.
void X86_64Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
963
964
// xorps xmm, m128: [REX] 0F 57 /r — bitwise XOR of packed singles with memory.
void X86_64Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}
972
973
// xorps xmm, xmm: [REX] 0F 57 /r — bitwise XOR of packed singles.
void X86_64Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
981
982
// andpd xmm, m128: 66 [REX] 0F 54 /r — bitwise AND of packed doubles with memory.
void X86_64Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst.LowBits(), src);
}
991
// andpd xmm, xmm: 66 [REX] 0F 54 /r — bitwise AND of packed doubles.
void X86_64Assembler::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
1000
// andps xmm, xmm: [REX] 0F 54 /r — bitwise AND of packed singles.
void X86_64Assembler::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
1008
// orpd xmm, xmm: 66 [REX] 0F 56 /r — bitwise OR of packed doubles.
void X86_64Assembler::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
1017
// orps xmm, xmm: [REX] 0F 56 /r — bitwise OR of packed singles.
void X86_64Assembler::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001025
// fldl m64: DD /0 — push a 64-bit float from memory onto the x87 stack.
void X86_64Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}
1031
1032
// fstl m64: DD /2 — store ST(0) to memory as a 64-bit float (no pop).
void X86_64Assembler::fstl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(2, dst);
}
1038
1039
// fstpl m64: DD /3 — store ST(0) to memory as a 64-bit float and pop the stack.
void X86_64Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}
1045
1046
// fstsw: 9B DF E0 — store the x87 status word into AX (wait form; the 9B FWAIT
// prefix makes pending FP exceptions fire first).
void X86_64Assembler::fstsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x9B);
  EmitUint8(0xDF);
  EmitUint8(0xE0);
}
1053
1054
// fnstcw m16: D9 /7 — store the x87 control word to memory (no-wait form).
void X86_64Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}
1060
1061
// fldcw m16: D9 /5 — load the x87 control word from memory.
void X86_64Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}
1067
1068
// fistpl m64int: DF /7 — store ST(0) to memory as a 64-bit integer and pop.
void X86_64Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}
1074
1075
// fistps m32int: DB /3 — store ST(0) to memory as a 32-bit integer and pop.
void X86_64Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}
1081
1082
// fildl m64int: DF /5 — push a 64-bit integer from memory onto the x87 stack.
void X86_64Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}
1088
1089
// filds m32int: DB /0 — push a 32-bit integer from memory onto the x87 stack.
void X86_64Assembler::filds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}
1095
1096
// fincstp: D9 F7 — increment the x87 stack-top pointer.
void X86_64Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}
1102
1103
// ffree ST(i): DD C0+i — mark x87 register ST(i) as empty.
// NOTE(review): CHECK_LT(..., 7) only allows indices 0..6, excluding ST(7) even
// though the encoding supports i in 0..7 — looks like an off-by-one; confirm
// whether any caller needs ST(7) before relaxing the check.
void X86_64Assembler::ffree(const Immediate& index) {
  CHECK_LT(index.value(), 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + index.value());
}
1110
1111
// fsin: D9 FE — ST(0) = sin(ST(0)).
void X86_64Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}
1117
1118
// fcos: D9 FF — ST(0) = cos(ST(0)).
void X86_64Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}
1124
1125
// fptan: D9 F2 — partial tangent: ST(0) = tan(ST(0)), then pushes 1.0.
void X86_64Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}
1131
// fucompp: DA E9 — unordered compare ST(0) with ST(1), then pop both.
void X86_64Assembler::fucompp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDA);
  EmitUint8(0xE9);
}
1137
1138
// fprem: D9 F8 — partial remainder of ST(0) / ST(1).
void X86_64Assembler::fprem() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF8);
}
1144
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001145
// xchgl r32, r32: 87 /r, with the one-byte 90+r short form when one operand is EAX.
void X86_64Assembler::xchgl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There is a short version for rax.
  // It's a bit awkward, as CpuRegister has a const field, so assignment and thus swapping doesn't
  // work.
  const bool src_rax = src.AsRegister() == RAX;
  const bool dst_rax = dst.AsRegister() == RAX;
  if (src_rax || dst_rax) {
    // NOTE(review): when BOTH operands are RAX this emits the bare byte 0x90,
    // which the CPU defines as NOP — it does not zero the upper 32 bits the way
    // a real 32-bit exchange would; confirm no caller relies on xchgl(RAX, RAX).
    EmitOptionalRex32(src_rax ? dst : src);
    EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
    return;
  }

  // General case.
  EmitOptionalRex32(src, dst);
  EmitUint8(0x87);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1164
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001165
// xchgq r64, r64: REX.W 87 /r, with the REX.W 90+r short form when one operand is RAX.
void X86_64Assembler::xchgq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There is a short version for rax.
  // It's a bit awkward, as CpuRegister has a const field, so assignment and thus swapping doesn't
  // work.
  const bool src_rax = src.AsRegister() == RAX;
  const bool dst_rax = dst.AsRegister() == RAX;
  if (src_rax || dst_rax) {
    // If src == target, emit a nop instead.
    // (A 64-bit exchange of RAX with itself has no architectural effect.)
    if (src_rax && dst_rax) {
      EmitUint8(0x90);
    } else {
      EmitRex64(src_rax ? dst : src);
      EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
    }
    return;
  }

  // General case.
  EmitRex64(src, dst);
  EmitUint8(0x87);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1189
1190
// xchgl r32, m32: [REX] 87 /r — exchange a register with memory (implicitly locked).
void X86_64Assembler::xchgl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x87);
  EmitOperand(reg.LowBits(), address);
}
1197
1198
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001199void X86_64Assembler::cmpw(const Address& address, const Immediate& imm) {
1200 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1201 EmitOptionalRex32(address);
1202 EmitUint8(0x66);
1203 EmitComplex(7, address, imm);
1204}
1205
1206
// cmpl r32, imm: [REX] 81/83 /7 — compare a register with an immediate
// (EmitComplex picks the short imm8 or RAX forms where possible).
void X86_64Assembler::cmpl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(7, Operand(reg), imm);
}
1212
1213
// cmpl r32, r32: [REX] 3B /r — compare two registers.
void X86_64Assembler::cmpl(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}
1220
1221
// cmpl r32, m32: [REX] 3B /r — compare a register with memory.
void X86_64Assembler::cmpl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}
1228
1229
// cmpl m32, r32: [REX] 39 /r — compare memory with a register (operands reversed
// relative to the 3B form).
void X86_64Assembler::cmpl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x39);
  EmitOperand(reg.LowBits(), address);
}
1236
1237
// cmpl m32, imm: [REX] 81/83 /7 — compare memory with an immediate.
void X86_64Assembler::cmpl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(7, address, imm);
}
1243
1244
// cmpq r64, r64: REX.W 3B /r — 64-bit compare of two registers.
void X86_64Assembler::cmpq(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}
1251
1252
// cmpq r64, imm32: REX.W 81/83 /7 — 64-bit compare with a sign-extended immediate.
// The ISA has no imm64 form, hence the is_int32() check.
void X86_64Assembler::cmpq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // cmpq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(7, Operand(reg), imm);
}
1259
1260
// cmpq r64, m64: REX.W 3B /r — 64-bit compare of a register with memory.
void X86_64Assembler::cmpq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);  // REX.W plus any extension bits the address needs.
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}
1267
1268
// cmpq m64, imm32: REX.W 81/83 /7 — 64-bit compare of memory with a sign-extended
// immediate (no imm64 form exists).
void X86_64Assembler::cmpq(const Address& address, const Immediate& imm) {
  CHECK(imm.is_int32());  // cmpq only supports 32b immediate.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(address);
  EmitComplex(7, address, imm);
}
1275
1276
// addl r32, r32: [REX] 03 /r — 32-bit register add, dst += src.
void X86_64Assembler::addl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x03);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
1283
1284
// addl r32, m32: [REX] 03 /r — 32-bit add of a memory operand into a register.
void X86_64Assembler::addl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x03);
  EmitOperand(reg.LowBits(), address);
}
1291
1292
// testl r32, r32: [REX] 85 /r — AND the operands and set flags, discarding the result.
void X86_64Assembler::testl(CpuRegister reg1, CpuRegister reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg1, reg2);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());
}
1299
1300
// testl r32, m32: [REX] 85 /r — flag-setting AND of a register with memory.
void X86_64Assembler::testl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x85);
  EmitOperand(reg.LowBits(), address);
}
1307
1308
// testl r32, imm — picks the shortest of three encodings:
//   A8 ib / F6 /0 ib  (byte form, only for AL/CL/DL/BL),
//   A9 id             (RAX short form),
//   F7 /0 id          (general form).
void X86_64Assembler::testl(CpuRegister reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (RAX, RBX, RCX, and RDX)
  // we only test the byte CpuRegister to keep the encoding short.
  // (SPL/BPL/SIL/DIL and R8B-R15B would need a REX prefix, which is why the
  // byte form is restricted to register numbers below 4.)
  if (immediate.is_uint8() && reg.AsRegister() < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg.AsRegister() == RAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg.AsRegister());  // ModRM byte: register-direct /0.
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg.AsRegister() == RAX) {
    // Use short form if the destination is RAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitOptionalRex32(reg);
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
1333
1334
// testq r64, r64: REX.W 85 /r — 64-bit flag-setting AND of two registers.
void X86_64Assembler::testq(CpuRegister reg1, CpuRegister reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg1, reg2);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());
}
1341
1342
// testq r64, m64: REX.W 85 /r — 64-bit flag-setting AND of a register with memory.
void X86_64Assembler::testq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);  // REX.W plus any extension bits the address needs.
  EmitUint8(0x85);
  EmitOperand(reg.LowBits(), address);
}
1349
1350
// andl r32, r32: [REX] 23 /r — 32-bit bitwise AND, dst &= src.
void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), Operand(src));
}
1357
1358
// andl r32, m32: [REX] 23 /r — 32-bit bitwise AND of a memory operand into a register.
void X86_64Assembler::andl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x23);
  EmitOperand(reg.LowBits(), address);
}
1365
1366
// andl r32, imm: [REX] 81/83 /4 — 32-bit bitwise AND with an immediate.
void X86_64Assembler::andl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(4, Operand(dst), imm);
}
1372
1373
// andq r64, imm32: REX.W 81/83 /4 — 64-bit AND with a sign-extended immediate
// (the ISA has no imm64 form).
void X86_64Assembler::andq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // andq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(4, Operand(reg), imm);
}
1380
1381
// andq r64, r64: REX.W 23 /r — 64-bit bitwise AND, dst &= src.
void X86_64Assembler::andq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), Operand(src));
}
1388
1389
// andq r64, m64: REX.W 23 /r — 64-bit bitwise AND of a memory operand into a register.
void X86_64Assembler::andq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), src);
}
1396
1397
// orl r32, r32: [REX] 0B /r — 32-bit bitwise OR, dst |= src.
void X86_64Assembler::orl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), Operand(src));
}
1404
1405
// orl r32, m32: [REX] 0B /r — 32-bit bitwise OR of a memory operand into a register.
void X86_64Assembler::orl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0B);
  EmitOperand(reg.LowBits(), address);
}
1412
1413
// orl r32, imm: [REX] 81/83 /1 — 32-bit bitwise OR with an immediate.
void X86_64Assembler::orl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(1, Operand(dst), imm);
}
1419
1420
// orq r64, imm32: REX.W 81/83 /1 — 64-bit OR with a sign-extended immediate
// (the ISA has no imm64 form).
void X86_64Assembler::orq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // orq only supports 32b immediate.
  EmitRex64(dst);
  EmitComplex(1, Operand(dst), imm);
}
1427
1428
// orq r64, r64: REX.W 0B /r — 64-bit bitwise OR, dst |= src.
void X86_64Assembler::orq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), Operand(src));
}
1435
1436
// orq r64, m64: REX.W 0B /r — 64-bit bitwise OR of a memory operand into a register.
void X86_64Assembler::orq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), src);
}
1443
1444
// xorl r32, r32: [REX] 33 /r — 32-bit bitwise XOR, dst ^= src.
void X86_64Assembler::xorl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x33);
  EmitOperand(dst.LowBits(), Operand(src));
}
1451
Andreas Gampe5a4fa822014-03-31 16:50:12 -07001452
// xorl: 32-bit bitwise XOR, reg <- reg ^ mem (opcode 0x33 /r).
void X86_64Assembler::xorl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x33);
  EmitOperand(reg.LowBits(), address);
}
1459
1460
// xorl: 32-bit bitwise XOR with an immediate (group-1 opcode extension /6).
void X86_64Assembler::xorl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(6, Operand(dst), imm);
}
1466
1467
// xorq: 64-bit bitwise XOR, dst <- dst ^ src (REX.W + 0x33 /r).
void X86_64Assembler::xorq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x33);
  EmitOperand(dst.LowBits(), Operand(src));
}
1474
1475
// xorq: 64-bit bitwise XOR with a sign-extended 32-bit immediate (REX.W + /6).
void X86_64Assembler::xorq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // xorq only supports 32b immediate.
  EmitRex64(dst);
  EmitComplex(6, Operand(dst), imm);
}
1482
// xorq: 64-bit bitwise XOR, dst <- dst ^ mem (REX.W + 0x33 /r).
void X86_64Assembler::xorq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x33);
  EmitOperand(dst.LowBits(), src);
}
1489
1490
Ian Rogersdd7624d2014-03-14 17:43:00 -07001491#if 0
1492void X86_64Assembler::rex(bool force, bool w, Register* r, Register* x, Register* b) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001493 // REX.WRXB
1494 // W - 64-bit operand
1495 // R - MODRM.reg
1496 // X - SIB.index
1497 // B - MODRM.rm/SIB.base
Ian Rogersdd7624d2014-03-14 17:43:00 -07001498 uint8_t rex = force ? 0x40 : 0;
1499 if (w) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001500 rex |= 0x48; // REX.W000
1501 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001502 if (r != nullptr && *r >= Register::R8 && *r < Register::kNumberOfCpuRegisters) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001503 rex |= 0x44; // REX.0R00
Ian Rogersdd7624d2014-03-14 17:43:00 -07001504 *r = static_cast<Register>(*r - 8);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001505 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001506 if (x != nullptr && *x >= Register::R8 && *x < Register::kNumberOfCpuRegisters) {
1507 rex |= 0x42; // REX.00X0
1508 *x = static_cast<Register>(*x - 8);
1509 }
1510 if (b != nullptr && *b >= Register::R8 && *b < Register::kNumberOfCpuRegisters) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001511 rex |= 0x41; // REX.000B
Ian Rogersdd7624d2014-03-14 17:43:00 -07001512 *b = static_cast<Register>(*b - 8);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001513 }
1514 if (rex != 0) {
1515 EmitUint8(rex);
1516 }
1517}
1518
Ian Rogersdd7624d2014-03-14 17:43:00 -07001519void X86_64Assembler::rex_reg_mem(bool force, bool w, Register* dst, const Address& mem) {
1520 // REX.WRXB
1521 // W - 64-bit operand
1522 // R - MODRM.reg
1523 // X - SIB.index
1524 // B - MODRM.rm/SIB.base
1525 uint8_t rex = mem->rex();
1526 if (force) {
1527 rex |= 0x40; // REX.0000
1528 }
1529 if (w) {
1530 rex |= 0x48; // REX.W000
1531 }
1532 if (dst != nullptr && *dst >= Register::R8 && *dst < Register::kNumberOfCpuRegisters) {
1533 rex |= 0x44; // REX.0R00
1534 *dst = static_cast<Register>(*dst - 8);
1535 }
1536 if (rex != 0) {
1537 EmitUint8(rex);
1538 }
1539}
1540
1541void rex_mem_reg(bool force, bool w, Address* mem, Register* src);
1542#endif
1543
// addl: 32-bit add with an immediate (group-1 opcode extension /0).
void X86_64Assembler::addl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(0, Operand(reg), imm);
}
1549
1550
// addq: 64-bit add with a sign-extended 32-bit immediate (REX.W + /0).
void X86_64Assembler::addq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // addq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(0, Operand(reg), imm);
}
1557
1558
// addq: 64-bit add, dst <- dst + mem (REX.W + 0x03 /r).
void X86_64Assembler::addq(CpuRegister dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, address);
  EmitUint8(0x03);
  EmitOperand(dst.LowBits(), address);
}
1565
1566
// addq: 64-bit add, dst <- dst + src.
void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x01 is addq r/m64 <- r/m64 + r64, with op1 in r/m and op2 in reg:
  // so the REX.R/REX.B roles are swapped relative to the 0x03 form.
  EmitRex64(src, dst);
  EmitUint8(0x01);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
1574
1575
// addl: 32-bit add into memory, mem <- mem + reg (opcode 0x01 /r).
void X86_64Assembler::addl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x01);
  EmitOperand(reg.LowBits(), address);
}
1582
1583
// addl: 32-bit add of an immediate into memory (group-1 /0 on a memory operand).
void X86_64Assembler::addl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(0, address, imm);
}
1589
1590
// subl: 32-bit subtract, dst <- dst - src (opcode 0x2B /r).
void X86_64Assembler::subl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x2B);
  EmitOperand(dst.LowBits(), Operand(src));
}
1597
1598
// subl: 32-bit subtract of an immediate (group-1 opcode extension /5).
void X86_64Assembler::subl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(5, Operand(reg), imm);
}
1604
1605
// subq: 64-bit subtract of a sign-extended 32-bit immediate (REX.W + /5).
void X86_64Assembler::subq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // subq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(5, Operand(reg), imm);
}
1612
1613
// subq: 64-bit subtract, dst <- dst - src (REX.W + 0x2B /r).
void X86_64Assembler::subq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x2B);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
1620
1621
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001622void X86_64Assembler::subq(CpuRegister reg, const Address& address) {
1623 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
Mark Mendell7fd8b592015-04-22 10:46:07 -04001624 EmitRex64(reg, address);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001625 EmitUint8(0x2B);
1626 EmitOperand(reg.LowBits() & 7, address);
1627}
1628
1629
// subl: 32-bit subtract, reg <- reg - mem (opcode 0x2B /r).
void X86_64Assembler::subl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x2B);
  EmitOperand(reg.LowBits(), address);
}
1636
1637
// cdq: sign-extends EAX into EDX:EAX (opcode 0x99, no REX.W).
void X86_64Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}
1642
1643
// cqo: sign-extends RAX into RDX:RAX (REX.W + 0x99).
void X86_64Assembler::cqo() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64();
  EmitUint8(0x99);
}
1649
1650
// idivl: signed divide of EDX:EAX by reg (0xF7 /7; 0xF8|reg is ModRM with
// mod=11, opcode extension 7). Quotient -> EAX, remainder -> EDX.
void X86_64Assembler::idivl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg.LowBits());
}
1657
1658
// idivq: signed divide of RDX:RAX by reg (REX.W + 0xF7 /7).
void X86_64Assembler::idivq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg.LowBits());
}
1665
1666
// imull: 32-bit signed multiply, dst <- dst * src (0x0F 0xAF /r).
void X86_64Assembler::imull(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst.LowBits(), Operand(src));
}
1674
// imull: 32-bit signed multiply by an immediate, reg <- reg * imm.
// Uses 0x6B (sign-extended imm8) when the value fits, else 0x69 (imm32).
void X86_64Assembler::imull(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // imull only supports 32b immediate.

  // reg serves both as source and destination, hence (reg, reg).
  EmitOptionalRex32(reg, reg);

  // See whether imm can be represented as a sign-extended 8bit value.
  int32_t v32 = static_cast<int32_t>(imm.value());
  if (IsInt<8>(v32)) {
    // Sign-extension works.
    EmitUint8(0x6B);
    EmitOperand(reg.LowBits(), Operand(reg));
    EmitUint8(static_cast<uint8_t>(v32 & 0xFF));
  } else {
    // Not representable, use full immediate.
    EmitUint8(0x69);
    EmitOperand(reg.LowBits(), Operand(reg));
    EmitImmediate(imm);
  }
}
1695
1696
// imull: 32-bit signed multiply, reg <- reg * mem (0x0F 0xAF /r).
void X86_64Assembler::imull(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg.LowBits(), address);
}
1704
1705
// imulq: 64-bit signed multiply, dst <- dst * src (REX.W + 0x0F 0xAF /r).
void X86_64Assembler::imulq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}
1713
1714
// imulq: reg <- reg * imm; delegates to the three-operand form.
void X86_64Assembler::imulq(CpuRegister reg, const Immediate& imm) {
  imulq(reg, reg, imm);
}
1718
// imulq: 64-bit signed multiply, dst <- reg * imm.
// Uses 0x6B (sign-extended imm8) when the value fits, else 0x69 (imm32).
void X86_64Assembler::imulq(CpuRegister dst, CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // imulq only supports 32b immediate.

  EmitRex64(dst, reg);

  // See whether imm can be represented as a sign-extended 8bit value.
  int64_t v64 = imm.value();
  if (IsInt<8>(v64)) {
    // Sign-extension works.
    EmitUint8(0x6B);
    EmitOperand(dst.LowBits(), Operand(reg));
    EmitUint8(static_cast<uint8_t>(v64 & 0xFF));
  } else {
    // Not representable, use full immediate.
    EmitUint8(0x69);
    EmitOperand(dst.LowBits(), Operand(reg));
    EmitImmediate(imm);
  }
}
1739
// imulq: 64-bit signed multiply, reg <- reg * mem (REX.W + 0x0F 0xAF /r).
void X86_64Assembler::imulq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg.LowBits(), address);
}
1747
1748
// imull: one-operand signed multiply, EDX:EAX <- EAX * reg (0xF7 /5).
void X86_64Assembler::imull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}
1755
1756
// imulq: one-operand signed multiply, RDX:RAX <- RAX * reg (REX.W + 0xF7 /5).
void X86_64Assembler::imulq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}
1763
1764
// imull: one-operand signed multiply, EDX:EAX <- EAX * mem (0xF7 /5).
void X86_64Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}
1771
1772
// mull: unsigned multiply, EDX:EAX <- EAX * reg (0xF7 /4).
void X86_64Assembler::mull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}
1779
1780
// mull: unsigned multiply, EDX:EAX <- EAX * mem (0xF7 /4).
void X86_64Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
1787
1788
// shll: 32-bit shift left by immediate (group-2 /4).
void X86_64Assembler::shll(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 4, reg, imm);
}
1792
1793
// shlq: 64-bit shift left by immediate (REX.W, group-2 /4).
void X86_64Assembler::shlq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 4, reg, imm);
}
1797
1798
// shll: 32-bit shift left by CL (group-2 /4; shifter must be RCX).
void X86_64Assembler::shll(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 4, operand, shifter);
}
1802
1803
// shlq: 64-bit shift left by CL (REX.W, group-2 /4; shifter must be RCX).
void X86_64Assembler::shlq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 4, operand, shifter);
}
1807
1808
// shrl: 32-bit logical shift right by immediate (group-2 /5).
void X86_64Assembler::shrl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 5, reg, imm);
}
1812
1813
// shrq: 64-bit logical shift right by immediate (REX.W, group-2 /5).
void X86_64Assembler::shrq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 5, reg, imm);
}
1817
1818
// shrl: 32-bit logical shift right by CL (group-2 /5; shifter must be RCX).
void X86_64Assembler::shrl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 5, operand, shifter);
}
1822
1823
// shrq: 64-bit logical shift right by CL (REX.W, group-2 /5; shifter must be RCX).
void X86_64Assembler::shrq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 5, operand, shifter);
}
1827
1828
// sarl: 32-bit arithmetic shift right by immediate (group-2 /7).
void X86_64Assembler::sarl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 7, reg, imm);
}
1832
1833
// sarl: 32-bit arithmetic shift right by CL (group-2 /7; shifter must be RCX).
void X86_64Assembler::sarl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(false, 7, operand, shifter);
}
1837
1838
// sarq: 64-bit arithmetic shift right by immediate (REX.W, group-2 /7).
void X86_64Assembler::sarq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 7, reg, imm);
}
1842
1843
// sarq: 64-bit arithmetic shift right by CL (REX.W, group-2 /7; shifter must be RCX).
void X86_64Assembler::sarq(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(true, 7, operand, shifter);
}
1847
1848
// negl: 32-bit two's-complement negation (0xF7 /3).
void X86_64Assembler::negl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}
1855
Roland Levillain70566432014-10-24 16:20:17 +01001856
// negq: 64-bit two's-complement negation (REX.W + 0xF7 /3).
void X86_64Assembler::negq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}
1863
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07001864
// notl: 32-bit one's-complement negation (0xF7 /2; 0xD0|reg is ModRM with
// mod=11, opcode extension 2).
void X86_64Assembler::notl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg.LowBits());
}
1871
1872
// notq: 64-bit one's-complement negation (REX.W + 0xF7 /2).
void X86_64Assembler::notq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg);
  EmitUint8(0xF7);
  EmitOperand(2, Operand(reg));
}
1879
1880
// enter: creates a stack frame of 'imm' bytes (0xC8 imm16 imm8), always with
// nesting level 0.
void X86_64Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  CHECK(imm.is_uint16()) << imm.value();
  EmitUint8(imm.value() & 0xFF);         // Frame size, little-endian imm16.
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);                       // Nesting level.
}
1889
1890
// leave: tears down the current stack frame (0xC9).
void X86_64Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
1895
1896
// ret: near return (0xC3).
void X86_64Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
1901
1902
// ret imm16: near return popping 'imm' additional bytes of arguments (0xC2).
void X86_64Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);         // Little-endian imm16.
  EmitUint8((imm.value() >> 8) & 0xFF);
}
1910
1911
1912
// nop: single-byte no-operation (0x90).
void X86_64Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}
1917
1918
// int3: breakpoint trap (0xCC).
void X86_64Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
1923
1924
// hlt: halts the processor (0xF4); privileged, used here to force a fault.
void X86_64Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
1929
1930
// j: conditional jump to 'label'. For a bound label (necessarily backward,
// see the CHECK) the shortest of Jcc rel8 (0x70+cc) and Jcc rel32
// (0x0F 0x80+cc) is chosen; unbound labels always get the long form so the
// 4-byte displacement can be patched in Bind().
void X86_64Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // Jcc rel8.
    static const int kLongSize = 6;   // 0F Jcc rel32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);              // Bound labels are behind us.
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
1952
1953
// jmp: indirect jump through a register (0xFF /4).
void X86_64Assembler::jmp(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg.LowBits());
}
1960
// jmp: indirect jump through a memory operand (0xFF /4).
void X86_64Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}
1967
// jmp: unconditional jump to 'label'. Bound (backward) labels use the
// shortest of JMP rel8 (0xEB) and JMP rel32 (0xE9); unbound labels always
// use the long form so Bind() can patch the 4-byte displacement.
void X86_64Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // JMP rel8.
    static const int kLongSize = 5;   // JMP rel32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);              // Bound labels are behind us.
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}
1987
1988
// lock: emits the LOCK prefix (0xF0); returns 'this' so the locked
// instruction can be chained, e.g. lock()->cmpxchgl(...).
X86_64Assembler* X86_64Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}
1994
1995
// cmpxchgl: 32-bit compare-and-exchange of mem with reg, EAX implicit
// (0x0F 0xB1 /r). Typically preceded by lock().
void X86_64Assembler::cmpxchgl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg.LowBits(), address);
}
2003
Mark Mendell58d25fd2015-04-03 14:52:31 -04002004
// cmpxchgq: 64-bit compare-and-exchange of mem with reg, RAX implicit
// (REX.W + 0x0F 0xB1 /r). Typically preceded by lock().
void X86_64Assembler::cmpxchgq(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg.LowBits(), address);
}
2012
2013
// mfence: full memory barrier (0x0F 0xAE 0xF0).
void X86_64Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}
2020
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002021
// gs: emits the GS segment-override prefix (0x65); returns 'this' for
// chaining onto the following instruction.
X86_64Assembler* X86_64Assembler::gs() {
  // TODO: gs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x65);
  return this;
}
2028
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002029
Ian Rogersdd7624d2014-03-14 17:43:00 -07002030void X86_64Assembler::AddImmediate(CpuRegister reg, const Immediate& imm) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002031 int value = imm.value();
Ian Rogersdd7624d2014-03-14 17:43:00 -07002032 if (value != 0) {
2033 if (value > 0) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002034 addl(reg, imm);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002035 } else {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002036 subl(reg, Immediate(value));
2037 }
2038 }
2039}
2040
2041
// setcc: sets the low byte of dst to 1 or 0 according to 'condition'
// (0x0F 0x90+cc, ModRM mod=11).
void X86_64Assembler::setcc(Condition condition, CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // RSP, RBP, RDI, RSI need rex prefix (else the pattern encodes ah/bh/ch/dh).
  if (dst.NeedsRex() || dst.AsRegister() > 3) {
    EmitOptionalRex(true, false, false, false, dst.NeedsRex());
  }
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitUint8(0xC0 + dst.LowBits());
}
2052
// bswapl: reverses the byte order of a 32-bit register (0x0F 0xC8+rd).
void X86_64Assembler::bswapl(CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, false, false, false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0xC8 + dst.LowBits());
}
2059
// bswapq: reverses the byte order of a 64-bit register (REX.W + 0x0F 0xC8+rd).
void X86_64Assembler::bswapq(CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex(false, true, false, false, dst.NeedsRex());
  EmitUint8(0x0F);
  EmitUint8(0xC8 + dst.LowBits());
}
2066
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002067
// repne scasw: scans the word string at [RDI] for the value in AX, with RCX
// as the iteration count (0x66 operand-size + 0xF2 REPNE prefixes, 0xAF).
void X86_64Assembler::repne_scasw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0xF2);
  EmitUint8(0xAF);
}
2074
2075
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002076void X86_64Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
2077 // TODO: Need to have a code constants table.
2078 int64_t constant = bit_cast<int64_t, double>(value);
2079 pushq(Immediate(High32Bits(constant)));
2080 pushq(Immediate(Low32Bits(constant)));
Ian Rogersdd7624d2014-03-14 17:43:00 -07002081 movsd(dst, Address(CpuRegister(RSP), 0));
Ian Rogers13735952014-10-08 12:43:28 -07002082 addq(CpuRegister(RSP), Immediate(2 * sizeof(intptr_t)));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002083}
2084
2085
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002086void X86_64Assembler::Align(int alignment, int offset) {
2087 CHECK(IsPowerOfTwo(alignment));
2088 // Emit nop instruction until the real position is aligned.
2089 while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
2090 nop();
2091 }
2092}
2093
2094
// Binds 'label' to the current buffer position and walks its chain of
// linked uses, patching each previously emitted 4-byte slot into a
// pc-relative rel32 displacement.
void X86_64Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);  // Next use in the chain.
    buffer_.Store<int32_t>(position, bound - (position + 4));  // rel32 fixup.
    label->position_ = next;
  }
  label->BindTo(bound);
}
2106
2107
// Emits a pre-encoded operand (ModRM byte plus optional SIB/displacement),
// merging 'reg_or_opcode' into bits 5..3 of the ModRM byte.
void X86_64Assembler::EmitOperand(uint8_t reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);  // The reg field must be vacant.
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
  AssemblerFixup* fixup = operand.GetFixup();
  if (fixup != nullptr) {
    EmitFixup(fixup);  // Propagate any relocation attached to the operand.
  }
}
2125
2126
2127void X86_64Assembler::EmitImmediate(const Immediate& imm) {
Andreas Gampe5a4fa822014-03-31 16:50:12 -07002128 if (imm.is_int32()) {
2129 EmitInt32(static_cast<int32_t>(imm.value()));
2130 } else {
2131 EmitInt64(imm.value());
2132 }
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002133}
2134
2135
// Emits a group-1 ALU instruction (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP, selected
// by 'reg_or_opcode') with an immediate, choosing the shortest encoding:
// 0x83 /op imm8 (sign-extended), the EAX/RAX short form 0x05+op<<3, or
// 0x81 /op imm32.
void X86_64Assembler::EmitComplex(uint8_t reg_or_opcode,
                                  const Operand& operand,
                                  const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(CpuRegister(RAX))) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
2156
2157
2158void X86_64Assembler::EmitLabel(Label* label, int instruction_size) {
2159 if (label->IsBound()) {
2160 int offset = label->Position() - buffer_.Size();
2161 CHECK_LE(offset, 0);
2162 EmitInt32(offset - instruction_size);
2163 } else {
2164 EmitLabelLink(label);
2165 }
2166}
2167
2168
2169void X86_64Assembler::EmitLabelLink(Label* label) {
2170 CHECK(!label->IsBound());
2171 int position = buffer_.Size();
2172 EmitInt32(label->position_);
2173 label->LinkTo(position);
2174}
2175
2176
// Emits a group-2 shift/rotate by immediate: 0xD1 /op (shift by one) or
// 0xC1 /op imm8. 'wide' selects the 64-bit form via REX.W.
void X86_64Assembler::EmitGenericShift(bool wide,
                                       int reg_or_opcode,
                                       CpuRegister reg,
                                       const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());
  if (wide) {
    EmitRex64(reg);
  } else {
    EmitOptionalRex32(reg);
  }
  if (imm.value() == 1) {
    EmitUint8(0xD1);  // Shift-by-one short form.
    EmitOperand(reg_or_opcode, Operand(reg));
  } else {
    EmitUint8(0xC1);
    EmitOperand(reg_or_opcode, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}
2197
2198
// Emits a group-2 shift/rotate by CL (0xD3 /op); the shifter is required to
// be RCX by the instruction set. 'wide' selects the 64-bit form via REX.W.
void X86_64Assembler::EmitGenericShift(bool wide,
                                       int reg_or_opcode,
                                       CpuRegister operand,
                                       CpuRegister shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter.AsRegister(), RCX);
  if (wide) {
    EmitRex64(operand);
  } else {
    EmitOptionalRex32(operand);
  }
  EmitUint8(0xD3);
  EmitOperand(reg_or_opcode, Operand(operand));
}
2213
Ian Rogersdd7624d2014-03-14 17:43:00 -07002214void X86_64Assembler::EmitOptionalRex(bool force, bool w, bool r, bool x, bool b) {
2215 // REX.WRXB
2216 // W - 64-bit operand
2217 // R - MODRM.reg
2218 // X - SIB.index
2219 // B - MODRM.rm/SIB.base
2220 uint8_t rex = force ? 0x40 : 0;
2221 if (w) {
2222 rex |= 0x48; // REX.W000
2223 }
2224 if (r) {
2225 rex |= 0x44; // REX.0R00
2226 }
2227 if (x) {
2228 rex |= 0x42; // REX.00X0
2229 }
2230 if (b) {
2231 rex |= 0x41; // REX.000B
2232 }
2233 if (rex != 0) {
2234 EmitUint8(rex);
2235 }
2236}
2237
// The EmitOptionalRex32 overloads emit a REX prefix (without REX.W) only when
// one of the participating registers or the memory operand requires extension
// bits; 32-bit-operand instructions otherwise need no prefix.

// Single register in MODRM.rm -> REX.B.
void X86_64Assembler::EmitOptionalRex32(CpuRegister reg) {
  EmitOptionalRex(false, false, false, false, reg.NeedsRex());
}

// dst in MODRM.reg -> REX.R; src in MODRM.rm -> REX.B.
void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, CpuRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, XmmRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, XmmRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, CpuRegister src) {
  EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
}

// Memory operand only: the Operand pre-computes its own REX.X/REX.B bits.
void X86_64Assembler::EmitOptionalRex32(const Operand& operand) {
  uint8_t rex = operand.rex();
  if (rex != 0) {
    EmitUint8(rex);
  }
}

// Register + memory operand: merge the operand's REX.X/REX.B with REX.R for dst.
void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}

void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2284
// The EmitRex64 overloads always emit a REX prefix with REX.W set (64-bit
// operand size), merging in R/B extension bits as the operands require.

void X86_64Assembler::EmitRex64() {
  EmitOptionalRex(false, true, false, false, false);  // Plain 0x48.
}

void X86_64Assembler::EmitRex64(CpuRegister reg) {
  EmitOptionalRex(false, true, false, false, reg.NeedsRex());
}

// Memory operand only: OR REX.W into the operand's pre-computed rex bits.
void X86_64Assembler::EmitRex64(const Operand& operand) {
  uint8_t rex = operand.rex();
  rex |= 0x48;  // REX.W000
  EmitUint8(rex);
}

void X86_64Assembler::EmitRex64(CpuRegister dst, CpuRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitRex64(XmmRegister dst, CpuRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitRex64(CpuRegister dst, XmmRegister src) {
  EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
}

// Register + memory operand: REX.W, plus REX.R for dst and the operand's bits.
void X86_64Assembler::EmitRex64(CpuRegister dst, const Operand& operand) {
  uint8_t rex = 0x48 | operand.rex();  // REX.W000
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  EmitUint8(rex);
}

void X86_64Assembler::EmitRex64(XmmRegister dst, const Operand& operand) {
  uint8_t rex = 0x48 | operand.rex();  // REX.W000
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  EmitUint8(rex);
}
2326
// Byte-register forms: legacy encodings map registers 4-7 to AH/CH/DH/BH.
// A (possibly empty-payload) REX prefix must be forced so they are decoded
// as SPL/BPL/SIL/DIL instead.

void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, CpuRegister src) {
  // For src, SPL, BPL, SIL, DIL need the rex prefix.
  bool force = src.AsRegister() > 3;
  EmitOptionalRex(force, false, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, const Operand& operand) {
  uint8_t rex = operand.rex();
  // For dst, SPL, BPL, SIL, DIL need the rex prefix.
  bool force = dst.AsRegister() > 3;
  if (force) {
    rex |= 0x40;  // REX.0000
  }
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
2347
// Map architectural register numbers to their DWARF register numbers for CFI.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}

// Size of a stack slot / saved register on x86-64.
constexpr size_t kFramePointerSize = 8;
2356
// Emits the managed-ABI method prologue:
//   1. push callee-save GPRs (highest spill index first),
//   2. allocate the rest of the frame with one subq,
//   3. spill callee-save XMMs into the top of that allocation,
//   4. store the method reference at [RSP],
//   5. write incoming arguments (entry_spills) into the caller's out area.
// CFI is updated in lockstep with each stack-pointer change so unwinding is
// valid at every emitted instruction.
void X86_64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                 const std::vector<ManagedRegister>& spill_regs,
                                 const ManagedRegisterEntrySpills& entry_spills) {
  DCHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
  cfi_.SetCurrentCFAOffset(8);  // Return address on stack.
  CHECK_ALIGNED(frame_size, kStackAlignment);
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      pushq(spill.AsCpuRegister());
      gpr_count++;
      cfi_.AdjustCFAOffset(kFramePointerSize);
      cfi_.RelOffset(DWARFReg(spill.AsCpuRegister().AsRegister()), 0);
    }
  }
  // return address then method on stack.
  // Frame space not already consumed by the GPR pushes and the return address.
  int64_t rest_of_frame = static_cast<int64_t>(frame_size)
                          - (gpr_count * kFramePointerSize)
                          - kFramePointerSize /*return address*/;
  subq(CpuRegister(RSP), Immediate(rest_of_frame));
  cfi_.AdjustCFAOffset(rest_of_frame);

  // spill xmms
  // XMMs are stored just below the pushed GPRs, working downward. This layout
  // must stay in sync with the offset math in RemoveFrame().
  int64_t offset = rest_of_frame;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset -= sizeof(double);
      movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister());
      cfi_.RelOffset(DWARFReg(spill.AsXmmRegister().AsFloatRegister()), offset);
    }
  }

  DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));

  // The method reference is a 4-byte StackReference, hence movl.
  movl(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister());

  // Entry spills land above this frame (frame_size + spill offset), i.e. in
  // the caller-provided argument area.
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ManagedRegisterSpill spill = entry_spills.at(i);
    if (spill.AsX86_64().IsCpuRegister()) {
      if (spill.getSize() == 8) {
        movq(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
             spill.AsX86_64().AsCpuRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movl(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsCpuRegister());
      }
    } else {
      if (spill.getSize() == 8) {
        movsd(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsXmmRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movss(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), spill.AsX86_64().AsXmmRegister());
      }
    }
  }
}
2415
// Emits the method epilogue, mirroring BuildFrame(): reload callee-save XMMs,
// release the non-GPR portion of the frame, pop callee-save GPRs, return.
// CFI state is saved/restored around the epilogue so code emitted after the
// ret (other exit paths) still unwinds with the full-frame description.
void X86_64Assembler::RemoveFrame(size_t frame_size,
                                  const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();
  int gpr_count = 0;
  // unspill xmms
  // Start one slot below the first XMM save; offset is pre-incremented in the
  // loop. The arithmetic matches the downward-growing layout in BuildFrame().
  int64_t offset = static_cast<int64_t>(frame_size) - (spill_regs.size() * kFramePointerSize) - 2 * kFramePointerSize;
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset += sizeof(double);
      movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset));
      cfi_.Restore(DWARFReg(spill.AsXmmRegister().AsFloatRegister()));
    } else {
      gpr_count++;
    }
  }
  // Release everything except the pushed GPRs and the return address.
  int adjust = static_cast<int>(frame_size) - (gpr_count * kFramePointerSize) - kFramePointerSize;
  addq(CpuRegister(RSP), Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      popq(spill.AsCpuRegister());
      cfi_.AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
      cfi_.Restore(DWARFReg(spill.AsCpuRegister().AsRegister()));
    }
  }
  ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}
2449
// Grows the current frame by `adjust` bytes (encoded as addq of the negated
// value) and records the stack-pointer change in the CFI.
void X86_64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  // Negate through int64_t first: negating an unsigned size_t directly would
  // produce a huge positive immediate.
  addq(CpuRegister(RSP), Immediate(-static_cast<int64_t>(adjust)));
  cfi_.AdjustCFAOffset(adjust);
}

// Shrinks the current frame by `adjust` bytes; inverse of IncreaseFrameSize.
void X86_64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addq(CpuRegister(RSP), Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
}
2461
// Stores `msrc` into the stack slot at `offs`, dispatching on the managed
// register kind and the requested size (4 or 8 bytes).
void X86_64Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    if (size == 4) {
      CHECK_EQ(4u, size);
      movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    } else {
      CHECK_EQ(8u, size);
      movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    }
  } else if (src.IsRegisterPair()) {
    // NOTE(review): carried over from the 32-bit x86 assembler — the two movq
    // stores at +0/+4 would overlap, but CHECK_EQ(0u, size) rejects any
    // nonzero size, so this path appears effectively unreachable; confirm.
    CHECK_EQ(0u, size);
    movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());
    movq(Address(CpuRegister(RSP), FrameOffset(offs.Int32Value()+4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    // x87 stores pop ST0: fstps/fstpl for 4- and 8-byte floats respectively.
    if (size == 4) {
      fstps(Address(CpuRegister(RSP), offs));
    } else {
      fstpl(Address(CpuRegister(RSP), offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    } else {
      movsd(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    }
  }
}
2494
// Stores a (32-bit, compressed) heap reference into a stack slot.
void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}

// Stores a full 64-bit pointer into a stack slot.
void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}

// Stores a 32-bit immediate into a stack slot (scratch register unused).
void X86_64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                            ManagedRegister) {
  movl(Address(CpuRegister(RSP), dest), Immediate(imm));  // TODO(64) movq?
}

// Stores a 32-bit immediate into a Thread field, addressed via the GS segment.
void X86_64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                               ManagedRegister) {
  gs()->movl(Address::Absolute(dest, true), Immediate(imm));  // TODO(64) movq?
}

// Computes the absolute address of the stack slot `fr_offs` and stores it
// into the Thread field `thr_offs` (used to publish the managed SP).
void X86_64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                                 FrameOffset fr_offs,
                                                 ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), fr_offs));
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}

// Stores RSP itself into a Thread field.
void X86_64Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  gs()->movq(Address::Absolute(thr_offs, true), CpuRegister(RSP));
}

// Only needed on architectures with registers spanning two slots (ARM pairs).
void X86_64Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                    FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}
2534
// Loads `size` (4 or 8) bytes from the stack slot `src` into `mdest`,
// dispatching on the managed register kind; mirror of Store().
void X86_64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 4) {
      CHECK_EQ(4u, size);
      movl(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    } else {
      CHECK_EQ(8u, size);
      movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    }
  } else if (dest.IsRegisterPair()) {
    // NOTE(review): 32-bit-era pair handling; guarded by CHECK_EQ(0u, size).
    CHECK_EQ(0u, size);
    movq(dest.AsRegisterPairLow(), Address(CpuRegister(RSP), src));
    movq(dest.AsRegisterPairHigh(), Address(CpuRegister(RSP), FrameOffset(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    // x87 loads push onto the FP stack.
    if (size == 4) {
      flds(Address(CpuRegister(RSP), src));
    } else {
      fldl(Address(CpuRegister(RSP), src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    } else {
      movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    }
  }
}
2566
// Loads `size` bytes from a Thread field (GS-relative) into `mdest`.
void X86_64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    // NOTE(review): only 4-byte loads are supported into a plain GPR here;
    // 8-byte Thread fields go through the register-pair branch — confirm
    // this matches callers' expectations.
    CHECK_EQ(4u, size);
    gs()->movl(dest.AsCpuRegister(), Address::Absolute(src, true));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    gs()->movq(dest.AsRegisterPairLow(), Address::Absolute(src, true));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      gs()->flds(Address::Absolute(src, true));
    } else {
      gs()->fldl(Address::Absolute(src, true));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      gs()->movss(dest.AsXmmRegister(), Address::Absolute(src, true));
    } else {
      gs()->movsd(dest.AsXmmRegister(), Address::Absolute(src, true));
    }
  }
}
2592
// Loads a reference-holding stack slot into a GPR.
// NOTE(review): this loads 8 bytes while StoreRef() stores 4 (compressed
// reference) — presumably this slot holds a handle-scope pointer rather than
// a raw reference; confirm against callers.
void X86_64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
}
2598
2599void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
2600 MemberOffset offs) {
2601 X86_64ManagedRegister dest = mdest.AsX86_64();
2602 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
Hiroshi Yamauchif8892672014-09-30 10:56:14 -07002603 movl(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
Hiroshi Yamauchib88f0b12014-09-26 14:55:38 -07002604 if (kPoisonHeapReferences) {
2605 negl(dest.AsCpuRegister());
2606 }
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002607}
2608
2609void X86_64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
2610 Offset offs) {
2611 X86_64ManagedRegister dest = mdest.AsX86_64();
2612 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
2613 movq(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
2614}
2615
// Loads a 64-bit pointer from a Thread field (GS-relative) into a GPR.
void X86_64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest, ThreadOffset<8> offs) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  gs()->movq(dest.AsCpuRegister(), Address::Absolute(offs, true));
}
2621
2622void X86_64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
2623 X86_64ManagedRegister reg = mreg.AsX86_64();
2624 CHECK(size == 1 || size == 2) << size;
2625 CHECK(reg.IsCpuRegister()) << reg;
2626 if (size == 1) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002627 movsxb(reg.AsCpuRegister(), reg.AsCpuRegister());
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002628 } else {
2629 movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
2630 }
2631}
2632
2633void X86_64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
2634 X86_64ManagedRegister reg = mreg.AsX86_64();
2635 CHECK(size == 1 || size == 2) << size;
2636 CHECK(reg.IsCpuRegister()) << reg;
2637 if (size == 1) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002638 movzxb(reg.AsCpuRegister(), reg.AsCpuRegister());
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002639 } else {
2640 movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
2641 }
2642}
2643
2644void X86_64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
2645 X86_64ManagedRegister dest = mdest.AsX86_64();
2646 X86_64ManagedRegister src = msrc.AsX86_64();
2647 if (!dest.Equals(src)) {
2648 if (dest.IsCpuRegister() && src.IsCpuRegister()) {
2649 movq(dest.AsCpuRegister(), src.AsCpuRegister());
2650 } else if (src.IsX87Register() && dest.IsXmmRegister()) {
2651 // Pass via stack and pop X87 register
Ian Rogersdd7624d2014-03-14 17:43:00 -07002652 subl(CpuRegister(RSP), Immediate(16));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002653 if (size == 4) {
2654 CHECK_EQ(src.AsX87Register(), ST0);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002655 fstps(Address(CpuRegister(RSP), 0));
2656 movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002657 } else {
2658 CHECK_EQ(src.AsX87Register(), ST0);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002659 fstpl(Address(CpuRegister(RSP), 0));
2660 movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002661 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07002662 addq(CpuRegister(RSP), Immediate(16));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002663 } else {
2664 // TODO: x87, SSE
2665 UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
2666 }
2667 }
2668}
2669
// Copies a 32-bit reference between two stack slots via a scratch GPR.
void X86_64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                              ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), src));
  movl(Address(CpuRegister(RSP), dest), scratch.AsCpuRegister());
}

// Copies a 64-bit pointer from a Thread field into a stack slot.
void X86_64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                             ThreadOffset<8> thr_offs,
                                             ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(thr_offs, true));
  Store(fr_offs, scratch, 8);
}

// Copies a 64-bit pointer from a stack slot into a Thread field.
void X86_64Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 8);
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}

// Copies `size` bytes between stack slots. An 8-byte copy through a GPR is
// done as two 4-byte halves.
void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch,
                           size_t size) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}
2710
// Not needed on x86-64 (no caller uses this shape here).
void X86_64Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                           ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

// Copies 4 bytes (as an 8-byte push/pop) from a stack slot into memory at
// [dest_base + dest_offset] without using a scratch register.
void X86_64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                           ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushq(Address(CpuRegister(RSP), src));
  popq(Address(dest_base.AsX86_64().AsCpuRegister(), dest_offset));
}

// Copies from [[RSP + src_base] + src_offset] into the stack slot `dest`,
// dereferencing the base pointer through the scratch register.
void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movq(scratch, Address(CpuRegister(RSP), src_base));
  movq(scratch, Address(scratch, src_offset));
  movq(Address(CpuRegister(RSP), dest), scratch);
}

// Register-to-register memory copy via push/pop; no scratch needed.
void X86_64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                           ManagedRegister src, Offset src_offset,
                           ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushq(Address(src.AsX86_64().AsCpuRegister(), src_offset));
  popq(Address(dest.AsX86_64().AsCpuRegister(), dest_offset));
}

// Copies within the object pointed to by one stack slot (dest and src slots
// must coincide): [*slot + src_offset] -> [*slot + dest_offset].
void X86_64Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  movq(scratch, Address(CpuRegister(RSP), src));
  pushq(Address(scratch, src_offset));
  popq(Address(scratch, dest_offset));
}

// Full memory fence; the scratch register is unused on x86-64.
void X86_64Assembler::MemoryBarrier(ManagedRegister) {
  mfence();
}
2755
// Materializes a handle-scope entry address in out_reg: if the reference held
// in in_reg (or in the handle-scope stack slot, when no in_reg is given) is
// non-null, out_reg = &slot; if null and null_allowed, out_reg = 0.
void X86_64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister min_reg, bool null_allowed) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  if (in_reg.IsNoRegister()) {  // TODO(64): && null_allowed
    // Use out_reg as indicator of null.
    in_reg = out_reg;
    // TODO: movzwl
    movl(in_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    // Pre-zero out_reg so it already holds the null result; the leaq is
    // skipped when the reference tests as zero.
    if (!out_reg.Equals(in_reg)) {
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
}
2783
// As above, but writes the resulting handle-scope entry address (or null)
// into the stack slot out_off instead of a register.
void X86_64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister mscratch,
                                             bool null_allowed) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    // Load the reference; if it is null, scratch already holds 0 and the
    // address computation is skipped.
    movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  Store(out_off, scratch, 8);
}
2802
// Given a handle scope entry, load the associated reference.
// A null entry yields a null reference (out_reg is pre-zeroed and the
// dereference is skipped).
void X86_64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                   ManagedRegister min_reg) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movq(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}
2819
// Reference verification is a no-op on x86-64 for now.
void X86_64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86_64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

// Indirect call through [base + offset].
void X86_64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86_64ManagedRegister base = mbase.AsX86_64();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}

// Loads the (4-byte StackReference) base from the stack, then calls through
// [base + offset].
void X86_64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  movl(scratch, Address(CpuRegister(RSP), base));
  call(Address(scratch, offset));
}

// Calls through a Thread entrypoint field (GS-relative).
void X86_64Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister /*mscratch*/) {
  gs()->call(Address::Absolute(offset, true));
}

// Loads Thread::Current() (from the GS-based self field) into a register.
void X86_64Assembler::GetCurrentThread(ManagedRegister tr) {
  gs()->movq(tr.AsX86_64().AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
}

// Loads Thread::Current() and stores it into the stack slot `offset`.
void X86_64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
  movq(Address(CpuRegister(RSP), offset), scratch.AsCpuRegister());
}
2854
// Slowpath entered when Thread::Current()->_exception is non-null.
// Emitted out-of-line at the end of the method; ExceptionPoll() branches
// here when a pending exception is detected.
class X86_64ExceptionSlowPath FINAL : public SlowPath {
 public:
  // stack_adjust: the stack adjustment that Emit() undoes (via
  // DecreaseFrameSize) before delivering the exception.
  explicit X86_64ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
  virtual void Emit(Assembler *sp_asm) OVERRIDE;
 private:
  const size_t stack_adjust_;
};
2863
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002864void X86_64Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07002865 X86_64ExceptionSlowPath* slow = new X86_64ExceptionSlowPath(stack_adjust);
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002866 buffer_.EnqueueSlowPath(slow);
Ian Rogersdd7624d2014-03-14 17:43:00 -07002867 gs()->cmpl(Address::Absolute(Thread::ExceptionOffset<8>(), true), Immediate(0));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002868 j(kNotEqual, slow->Entry());
2869}
2870
Ian Rogersdd7624d2014-03-14 17:43:00 -07002871void X86_64ExceptionSlowPath::Emit(Assembler *sasm) {
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002872 X86_64Assembler* sp_asm = down_cast<X86_64Assembler*>(sasm);
2873#define __ sp_asm->
2874 __ Bind(&entry_);
2875 // Note: the return value is dead
2876 if (stack_adjust_ != 0) { // Fix up the frame.
2877 __ DecreaseFrameSize(stack_adjust_);
2878 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07002879 // Pass exception as argument in RDI
2880 __ gs()->movq(CpuRegister(RDI), Address::Absolute(Thread::ExceptionOffset<8>(), true));
2881 __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(8, pDeliverException), true));
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002882 // this call should never return
2883 __ int3();
2884#undef __
2885}
2886
Mark Mendellf55c3e02015-03-26 21:07:46 -04002887void X86_64Assembler::AddConstantArea() {
2888 const std::vector<int32_t>& area = constant_area_.GetBuffer();
Mark Mendell39dcf552015-04-09 20:42:42 -04002889 for (size_t i = 0, e = area.size(); i < e; i++) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002890 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2891 EmitInt32(area[i]);
2892 }
2893}
2894
2895int ConstantArea::AddInt32(int32_t v) {
Mark Mendell39dcf552015-04-09 20:42:42 -04002896 for (size_t i = 0, e = buffer_.size(); i < e; i++) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002897 if (v == buffer_[i]) {
2898 return i * elem_size_;
2899 }
2900 }
2901
2902 // Didn't match anything.
2903 int result = buffer_.size() * elem_size_;
2904 buffer_.push_back(v);
2905 return result;
2906}
2907
2908int ConstantArea::AddInt64(int64_t v) {
2909 int32_t v_low = v;
2910 int32_t v_high = v >> 32;
2911 if (buffer_.size() > 1) {
2912 // Ensure we don't pass the end of the buffer.
Mark Mendell39dcf552015-04-09 20:42:42 -04002913 for (size_t i = 0, e = buffer_.size() - 1; i < e; i++) {
2914 if (v_low == buffer_[i] && v_high == buffer_[i + 1]) {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002915 return i * elem_size_;
2916 }
2917 }
2918 }
2919
2920 // Didn't match anything.
2921 int result = buffer_.size() * elem_size_;
2922 buffer_.push_back(v_low);
2923 buffer_.push_back(v_high);
2924 return result;
2925}
2926
2927int ConstantArea::AddDouble(double v) {
2928 // Treat the value as a 64-bit integer value.
2929 return AddInt64(bit_cast<int64_t, double>(v));
2930}
2931
2932int ConstantArea::AddFloat(float v) {
2933 // Treat the value as a 32-bit integer value.
2934 return AddInt32(bit_cast<int32_t, float>(v));
2935}
2936
Dmitry Petrochenkofca82202014-03-21 11:21:37 +07002937} // namespace x86_64
2938} // namespace art