blob: a90a584cde043c2724b1e7bdb00319a4b4840ae1 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012#include "src/x64/assembler-x64.h"
13#include "src/x64/macro-assembler-x64.h"
14
15namespace v8 {
16namespace internal {
17namespace compiler {
18
// Shorthand used throughout this file for emitting code via the
// macro assembler of the enclosing CodeGenerator.
#define __ masm()->


// xmm0 is reserved as the scratch double register on x64.
#define kScratchDoubleReg xmm0

24
// Adds X64 specific methods for decoding operands.
class X64OperandConverter : public InstructionOperandConverter {
 public:
  X64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Returns the instruction input at |index| as an x64 Immediate.
  Immediate InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  // Returns the instruction input at |index| as a memory Operand,
  // with |extra| added to the slot displacement.
  Operand InputOperand(size_t index, int extra = 0) {
    return ToOperand(instr_->InputAt(index), extra);
  }

  // Returns the instruction output as a memory Operand.
  Operand OutputOperand() { return ToOperand(instr_->Output()); }

  // Converts a constant instruction operand to an Immediate.
  Immediate ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    if (constant.type() == Constant::kFloat64) {
      // Only the zero double constant is representable as an immediate.
      DCHECK_EQ(0, bit_cast<int64_t>(constant.ToFloat64()));
      return Immediate(0);
    }
    if (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
        constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
      // WASM memory references need their reloc mode preserved so the
      // immediate can be patched later.
      return Immediate(constant.ToInt32(), constant.rmode());
    }
    return Immediate(constant.ToInt32());
  }

  // Converts a stack-slot operand to a frame-relative memory Operand.
  Operand ToOperand(InstructionOperand* op, int extra = 0) {
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
  }

  // Builds a memory Operand addressing the given frame slot, relative to
  // rsp or rbp depending on the current frame access mode.
  Operand SlotToOperand(int slot_index, int extra = 0) {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot_index);
    return Operand(offset.from_stack_pointer() ? rsp : rbp,
                   offset.offset() + extra);
  }

  // Returns *offset and post-increments it; used to consume instruction
  // inputs one by one while decoding an addressing mode.
  static size_t NextOffset(size_t* offset) {
    size_t i = *offset;
    (*offset)++;
    return i;
  }

  // Maps an addressing mode to its SIB scale factor, where |one| is the
  // times_1 variant of the mode group (relies on the modes of a group
  // being declared consecutively).
  static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
    STATIC_ASSERT(0 == static_cast<int>(times_1));
    STATIC_ASSERT(1 == static_cast<int>(times_2));
    STATIC_ASSERT(2 == static_cast<int>(times_4));
    STATIC_ASSERT(3 == static_cast<int>(times_8));
    int scale = static_cast<int>(mode - one);
    DCHECK(scale >= 0 && scale < 4);
    return static_cast<ScaleFactor>(scale);
  }

  // Decodes the instruction's addressing mode and builds the matching
  // memory Operand, consuming instruction inputs starting at *offset and
  // advancing *offset past them.
  Operand MemoryOperand(size_t* offset) {
    AddressingMode mode = AddressingModeField::decode(instr_->opcode());
    switch (mode) {
      case kMode_MR: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_MRI: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, disp);
      }
      case kMode_MR1:
      case kMode_MR2:
      case kMode_MR4:
      case kMode_MR8: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1, mode);
        int32_t disp = 0;
        return Operand(base, index, scale, disp);
      }
      case kMode_MR1I:
      case kMode_MR2I:
      case kMode_MR4I:
      case kMode_MR8I: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, index, scale, disp);
      }
      case kMode_M1: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_M2:
        UNREACHABLE();  // Should use kModeMR with more compact encoding instead
        return Operand(no_reg, 0);
      case kMode_M4:
      case kMode_M8: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1, mode);
        int32_t disp = 0;
        return Operand(index, scale, disp);
      }
      case kMode_M1I:
      case kMode_M2I:
      case kMode_M4I:
      case kMode_M8I: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(index, scale, disp);
      }
      case kMode_None:
        UNREACHABLE();
        return Operand(no_reg, 0);
    }
    UNREACHABLE();
    return Operand(no_reg, 0);
  }

  // Convenience overload: decode the memory operand starting at input
  // |first_input| (default 0), discarding the final offset.
  Operand MemoryOperand(size_t first_input = 0) {
    return MemoryOperand(&first_input);
  }
};
150
151
namespace {

// Returns true if the instruction input at |index| is an immediate.
bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}


// Out-of-line path that zeroes |result|; used as the out-of-bounds
// fallback for checked integer loads.
class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ xorl(result_, result_); }

 private:
  Register const result_;
};


// Out-of-line path that fills |result| with an all-ones (NaN) pattern via
// pcmpeqd; used as the out-of-bounds fallback for checked FP loads.
class OutOfLineLoadNaN final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN(CodeGenerator* gen, XMMRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ Pcmpeqd(result_, result_); }

 private:
  XMMRegister const result_;
};


// Out-of-line slow path for double-to-int32 truncation, taken when the
// inline cvttsd2si path overflows.
class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
 public:
  OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
                             XMMRegister input)
      : OutOfLineCode(gen), result_(result), input_(input) {}

  void Generate() final {
    // Spill the input double to the stack and call the runtime-assisted
    // slow truncation helper, then restore the stack pointer.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(MemOperand(rsp, 0), input_);
    __ SlowTruncateToI(result_, rsp, 0);
    __ addp(rsp, Immediate(kDoubleSize));
  }

 private:
  Register const result_;
  XMMRegister const input_;
};


// Out-of-line write barrier emitted for kArchStoreWithWriteBarrier; the
// fast (in-line) path has already stored the value and checked the
// object's page flags before jumping here.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        operand_(operand),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      // Smi values never need a write barrier.
      __ JumpIfSmi(value_, exit());
    }
    // Skip the barrier when the value's page is not interesting.
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, zero,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // The stub expects the slot address in scratch1.
    __ leap(scratch1_, operand_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Operand const operand_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

}  // namespace
243
244
// Emits a one-operand instruction on the output, which may live in a
// register or on the stack.
#define ASSEMBLE_UNOP(asm_instr)         \
  do {                                   \
    if (instr->Output()->IsRegister()) { \
      __ asm_instr(i.OutputRegister());  \
    } else {                             \
      __ asm_instr(i.OutputOperand());   \
    }                                    \
  } while (0)
253
254
// Emits a two-operand ALU instruction, selecting the reg/reg, reg/mem,
// reg/imm or mem/imm form based on the kinds of the two inputs.
#define ASSEMBLE_BINOP(asm_instr)                              \
  do {                                                         \
    if (HasImmediateInput(instr, 1)) {                         \
      if (instr->InputAt(0)->IsRegister()) {                   \
        __ asm_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else {                                                 \
        __ asm_instr(i.InputOperand(0), i.InputImmediate(1));  \
      }                                                        \
    } else {                                                   \
      if (instr->InputAt(1)->IsRegister()) {                   \
        __ asm_instr(i.InputRegister(0), i.InputRegister(1));  \
      } else {                                                 \
        __ asm_instr(i.InputRegister(0), i.InputOperand(1));   \
      }                                                        \
    }                                                          \
  } while (0)
271
// Emits a compare. If the instruction encodes an addressing mode, the left
// operand is a memory operand (decoded first, advancing |index| past its
// inputs); otherwise this falls back to the same operand selection as
// ASSEMBLE_BINOP.
#define ASSEMBLE_COMPARE(asm_instr)                                   \
  do {                                                                \
    if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
      size_t index = 0;                                               \
      Operand left = i.MemoryOperand(&index);                         \
      if (HasImmediateInput(instr, index)) {                          \
        __ asm_instr(left, i.InputImmediate(index));                  \
      } else {                                                        \
        __ asm_instr(left, i.InputRegister(index));                   \
      }                                                               \
    } else {                                                          \
      if (HasImmediateInput(instr, 1)) {                              \
        if (instr->InputAt(0)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputImmediate(1));      \
        } else {                                                      \
          __ asm_instr(i.InputOperand(0), i.InputImmediate(1));       \
        }                                                             \
      } else {                                                        \
        if (instr->InputAt(1)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputRegister(1));       \
        } else {                                                      \
          __ asm_instr(i.InputRegister(0), i.InputOperand(1));        \
        }                                                             \
      }                                                               \
    }                                                                 \
  } while (0)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000298
// Emits a multiply. The three-operand (dst, src, imm) form is used when
// input 1 is an immediate; otherwise the two-operand form multiplies the
// output register in place by input 1.
#define ASSEMBLE_MULT(asm_instr)                              \
  do {                                                        \
    if (HasImmediateInput(instr, 1)) {                        \
      if (instr->InputAt(0)->IsRegister()) {                  \
        __ asm_instr(i.OutputRegister(), i.InputRegister(0),  \
                     i.InputImmediate(1));                    \
      } else {                                                \
        __ asm_instr(i.OutputRegister(), i.InputOperand(0),   \
                     i.InputImmediate(1));                    \
      }                                                       \
    } else {                                                  \
      if (instr->InputAt(1)->IsRegister()) {                  \
        __ asm_instr(i.OutputRegister(), i.InputRegister(1)); \
      } else {                                                \
        __ asm_instr(i.OutputRegister(), i.InputOperand(1));  \
      }                                                       \
    }                                                         \
  } while (0)
317
318
// Emits a shift of the output by either an immediate count of |width| bits
// or by the count in the cl register (the _cl instruction variant).
#define ASSEMBLE_SHIFT(asm_instr, width)                                   \
  do {                                                                     \
    if (HasImmediateInput(instr, 1)) {                                     \
      if (instr->Output()->IsRegister()) {                                 \
        __ asm_instr(i.OutputRegister(), Immediate(i.InputInt##width(1))); \
      } else {                                                             \
        __ asm_instr(i.OutputOperand(), Immediate(i.InputInt##width(1)));  \
      }                                                                    \
    } else {                                                               \
      if (instr->Output()->IsRegister()) {                                 \
        __ asm_instr##_cl(i.OutputRegister());                             \
      } else {                                                             \
        __ asm_instr##_cl(i.OutputOperand());                              \
      }                                                                    \
    }                                                                      \
  } while (0)
335
336
// Emits a sign/zero-extending move into the output register from a memory
// operand (when an addressing mode is encoded), a register, or a stack slot.
#define ASSEMBLE_MOVX(asm_instr)                            \
  do {                                                      \
    if (instr->addressing_mode() != kMode_None) {           \
      __ asm_instr(i.OutputRegister(), i.MemoryOperand());  \
    } else if (instr->InputAt(0)->IsRegister()) {           \
      __ asm_instr(i.OutputRegister(), i.InputRegister(0)); \
    } else {                                                \
      __ asm_instr(i.OutputRegister(), i.InputOperand(0));  \
    }                                                       \
  } while (0)
347
// Emits a two-operand SSE instruction; the destructive SSE form writes the
// result into input 0's register.
#define ASSEMBLE_SSE_BINOP(asm_instr)                                   \
  do {                                                                  \
    if (instr->InputAt(1)->IsFPRegister()) {                            \
      __ asm_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    } else {                                                            \
      __ asm_instr(i.InputDoubleRegister(0), i.InputOperand(1));        \
    }                                                                   \
  } while (0)
356
// Emits a one-input SSE instruction from a register or stack slot into the
// output double register.
#define ASSEMBLE_SSE_UNOP(asm_instr)                                    \
  do {                                                                  \
    if (instr->InputAt(0)->IsFPRegister()) {                            \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \
    } else {                                                            \
      __ asm_instr(i.OutputDoubleRegister(), i.InputOperand(0));        \
    }                                                                   \
  } while (0)
365
// Emits a three-operand (non-destructive) AVX instruction; requires AVX
// support, guarded by the CpuFeatureScope.
#define ASSEMBLE_AVX_BINOP(asm_instr)                                  \
  do {                                                                 \
    CpuFeatureScope avx_scope(masm(), AVX);                            \
    if (instr->InputAt(1)->IsFPRegister()) {                           \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputDoubleRegister(1));                          \
    } else {                                                           \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputOperand(1));                                 \
    }                                                                  \
  } while (0)
377
// Emits a bounds-checked FP load from buffer[index1 + index2]. When the
// length is a register, the inline compare suffices and out-of-bounds
// loads produce NaN via OutOfLineLoadNaN. When the length is a constant,
// the inline compare uses (length - index2); the out-of-line class redoes
// the full-precision check before either loading or producing NaN.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr)                               \
  do {                                                                       \
    auto result = i.OutputDoubleRegister();                                  \
    auto buffer = i.InputRegister(0);                                        \
    auto index1 = i.InputRegister(1);                                        \
    auto index2 = i.InputUint32(2);                                          \
    OutOfLineCode* ool;                                                      \
    if (instr->InputAt(3)->IsRegister()) {                                   \
      auto length = i.InputRegister(3);                                      \
      DCHECK_EQ(0, index2);                                                  \
      __ cmpl(index1, length);                                               \
      ool = new (zone()) OutOfLineLoadNaN(this, result);                     \
    } else {                                                                 \
      auto length = i.InputUint32(3);                                        \
      DCHECK_LE(index2, length);                                             \
      __ cmpl(index1, Immediate(length - index2));                           \
      class OutOfLineLoadFloat final : public OutOfLineCode {                \
       public:                                                               \
        OutOfLineLoadFloat(CodeGenerator* gen, XMMRegister result,           \
                           Register buffer, Register index1, int32_t index2, \
                           int32_t length)                                   \
            : OutOfLineCode(gen),                                            \
              result_(result),                                               \
              buffer_(buffer),                                               \
              index1_(index1),                                               \
              index2_(index2),                                               \
              length_(length) {}                                             \
                                                                             \
        void Generate() final {                                              \
          __ leal(kScratchRegister, Operand(index1_, index2_));              \
          __ Pcmpeqd(result_, result_);                                      \
          __ cmpl(kScratchRegister, Immediate(length_));                     \
          __ j(above_equal, exit());                                         \
          __ asm_instr(result_,                                              \
                       Operand(buffer_, kScratchRegister, times_1, 0));      \
        }                                                                    \
                                                                             \
       private:                                                              \
        XMMRegister const result_;                                           \
        Register const buffer_;                                              \
        Register const index1_;                                              \
        int32_t const index2_;                                               \
        int32_t const length_;                                               \
      };                                                                     \
      ool = new (zone())                                                     \
          OutOfLineLoadFloat(this, result, buffer, index1, index2, length);  \
    }                                                                        \
    __ j(above_equal, ool->entry());                                         \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2));          \
    __ bind(ool->exit());                                                    \
  } while (false)
429
// Emits a bounds-checked integer load from buffer[index1 + index2];
// structured like ASSEMBLE_CHECKED_LOAD_FLOAT but out-of-bounds loads
// yield zero instead of NaN.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                               \
  do {                                                                         \
    auto result = i.OutputRegister();                                          \
    auto buffer = i.InputRegister(0);                                          \
    auto index1 = i.InputRegister(1);                                          \
    auto index2 = i.InputUint32(2);                                            \
    OutOfLineCode* ool;                                                        \
    if (instr->InputAt(3)->IsRegister()) {                                     \
      auto length = i.InputRegister(3);                                        \
      DCHECK_EQ(0, index2);                                                    \
      __ cmpl(index1, length);                                                 \
      ool = new (zone()) OutOfLineLoadZero(this, result);                      \
    } else {                                                                   \
      auto length = i.InputUint32(3);                                          \
      DCHECK_LE(index2, length);                                               \
      __ cmpl(index1, Immediate(length - index2));                             \
      class OutOfLineLoadInteger final : public OutOfLineCode {                \
       public:                                                                 \
        OutOfLineLoadInteger(CodeGenerator* gen, Register result,              \
                             Register buffer, Register index1, int32_t index2, \
                             int32_t length)                                   \
            : OutOfLineCode(gen),                                              \
              result_(result),                                                 \
              buffer_(buffer),                                                 \
              index1_(index1),                                                 \
              index2_(index2),                                                 \
              length_(length) {}                                               \
                                                                               \
        void Generate() final {                                                \
          Label oob;                                                           \
          __ leal(kScratchRegister, Operand(index1_, index2_));                \
          __ cmpl(kScratchRegister, Immediate(length_));                       \
          __ j(above_equal, &oob, Label::kNear);                               \
          __ asm_instr(result_,                                                \
                       Operand(buffer_, kScratchRegister, times_1, 0));        \
          __ jmp(exit());                                                      \
          __ bind(&oob);                                                       \
          __ xorl(result_, result_);                                           \
        }                                                                      \
                                                                               \
       private:                                                                \
        Register const result_;                                                \
        Register const buffer_;                                                \
        Register const index1_;                                                \
        int32_t const index2_;                                                 \
        int32_t const length_;                                                 \
      };                                                                       \
      ool = new (zone())                                                       \
          OutOfLineLoadInteger(this, result, buffer, index1, index2, length);  \
    }                                                                          \
    __ j(above_equal, ool->entry());                                           \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2));            \
    __ bind(ool->exit());                                                      \
  } while (false)
484
// Emits a bounds-checked FP store to buffer[index1 + index2]. Out-of-bounds
// stores are simply skipped: with a register length a near branch skips the
// store; with a constant length the out-of-line class redoes the precise
// check before storing.
#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr)                               \
  do {                                                                        \
    auto buffer = i.InputRegister(0);                                         \
    auto index1 = i.InputRegister(1);                                         \
    auto index2 = i.InputUint32(2);                                           \
    auto value = i.InputDoubleRegister(4);                                    \
    if (instr->InputAt(3)->IsRegister()) {                                    \
      auto length = i.InputRegister(3);                                       \
      DCHECK_EQ(0, index2);                                                   \
      Label done;                                                             \
      __ cmpl(index1, length);                                                \
      __ j(above_equal, &done, Label::kNear);                                 \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);          \
      __ bind(&done);                                                         \
    } else {                                                                  \
      auto length = i.InputUint32(3);                                         \
      DCHECK_LE(index2, length);                                              \
      __ cmpl(index1, Immediate(length - index2));                            \
      class OutOfLineStoreFloat final : public OutOfLineCode {                \
       public:                                                                \
        OutOfLineStoreFloat(CodeGenerator* gen, Register buffer,              \
                            Register index1, int32_t index2, int32_t length,  \
                            XMMRegister value)                                \
            : OutOfLineCode(gen),                                             \
              buffer_(buffer),                                                \
              index1_(index1),                                                \
              index2_(index2),                                                \
              length_(length),                                                \
              value_(value) {}                                                \
                                                                              \
        void Generate() final {                                               \
          __ leal(kScratchRegister, Operand(index1_, index2_));               \
          __ cmpl(kScratchRegister, Immediate(length_));                      \
          __ j(above_equal, exit());                                          \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0),        \
                       value_);                                               \
        }                                                                     \
                                                                              \
       private:                                                               \
        Register const buffer_;                                               \
        Register const index1_;                                               \
        int32_t const index2_;                                                \
        int32_t const length_;                                                \
        XMMRegister const value_;                                             \
      };                                                                      \
      auto ool = new (zone())                                                 \
          OutOfLineStoreFloat(this, buffer, index1, index2, length, value);   \
      __ j(above_equal, ool->entry());                                        \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);          \
      __ bind(ool->exit());                                                   \
    }                                                                         \
  } while (false)
537
// Shared body for bounds-checked integer stores; |Value| is the type of the
// stored value (Register or Immediate). Expects a local named `value` of
// that type to be in scope at the expansion site (see
// ASSEMBLE_CHECKED_STORE_INTEGER below).
#define ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Value)                  \
  do {                                                                         \
    auto buffer = i.InputRegister(0);                                          \
    auto index1 = i.InputRegister(1);                                          \
    auto index2 = i.InputUint32(2);                                            \
    if (instr->InputAt(3)->IsRegister()) {                                     \
      auto length = i.InputRegister(3);                                        \
      DCHECK_EQ(0, index2);                                                    \
      Label done;                                                              \
      __ cmpl(index1, length);                                                 \
      __ j(above_equal, &done, Label::kNear);                                  \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);           \
      __ bind(&done);                                                          \
    } else {                                                                   \
      auto length = i.InputUint32(3);                                          \
      DCHECK_LE(index2, length);                                               \
      __ cmpl(index1, Immediate(length - index2));                             \
      class OutOfLineStoreInteger final : public OutOfLineCode {               \
       public:                                                                 \
        OutOfLineStoreInteger(CodeGenerator* gen, Register buffer,             \
                              Register index1, int32_t index2, int32_t length, \
                              Value value)                                     \
            : OutOfLineCode(gen),                                              \
              buffer_(buffer),                                                 \
              index1_(index1),                                                 \
              index2_(index2),                                                 \
              length_(length),                                                 \
              value_(value) {}                                                 \
                                                                               \
        void Generate() final {                                                \
          __ leal(kScratchRegister, Operand(index1_, index2_));                \
          __ cmpl(kScratchRegister, Immediate(length_));                       \
          __ j(above_equal, exit());                                           \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0),         \
                       value_);                                                \
        }                                                                      \
                                                                               \
       private:                                                                \
        Register const buffer_;                                                \
        Register const index1_;                                                \
        int32_t const index2_;                                                 \
        int32_t const length_;                                                 \
        Value const value_;                                                    \
      };                                                                       \
      auto ool = new (zone())                                                  \
          OutOfLineStoreInteger(this, buffer, index1, index2, length, value);  \
      __ j(above_equal, ool->entry());                                         \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);           \
      __ bind(ool->exit());                                                    \
    }                                                                          \
  } while (false)
589
// Dispatches a bounds-checked integer store on whether the stored value
// (input 4) is a register or an immediate, binding `value` for the _IMPL
// macro accordingly.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)                \
  do {                                                           \
    if (instr->InputAt(4)->IsRegister()) {                       \
      Register value = i.InputRegister(4);                       \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Register);  \
    } else {                                                     \
      Immediate value = i.InputImmediate(4);                     \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Immediate); \
    }                                                            \
  } while (false)
600
// Tears down the current stack frame: restores rsp from rbp and pops the
// saved frame pointer.
void CodeGenerator::AssembleDeconstructFrame() {
  __ movq(rsp, rbp);
  __ popq(rbp);
}
605
// Pops any stack slots the current activation no longer needs before a
// tail call; |stack_param_delta| is the stack-parameter count difference
// between caller and callee. Only shrinking (positive slot delta) is
// handled here; growth happens in AssemblePrepareTailCall.
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ addq(rsp, Immediate(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}
613
614
// Reserves extra stack slots for a tail call when the callee needs more
// stack parameters than the caller has (negative slot delta), restores the
// caller's frame pointer if a frame exists, and switches frame accesses to
// be rsp-relative.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ subq(rsp, Immediate(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    // Reload the caller's frame pointer from the current frame slot.
    __ movq(rbp, MemOperand(rbp, 0));
  }
  frame_access_state()->SetFrameAccessToSP();
}
626
// If the current frame is an arguments adaptor frame, drops it (and the
// caller's over-supplied arguments) before a tail call so the callee sees a
// plain JS frame. |args_reg| holds the callee's argument count;
// scratch1-scratch3 are clobbered.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ Cmp(Operand(rbp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &done, Label::kNear);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ SmiToInteger32(
      caller_args_count_reg,
      Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack);
  __ bind(&done);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000651
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000652// Assembles an instruction after register allocation, producing machine code.
Ben Murdochc5610432016-08-08 18:44:38 +0100653CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
654 Instruction* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000655 X64OperandConverter i(this, instr);
Ben Murdochda12d292016-06-02 14:46:10 +0100656 InstructionCode opcode = instr->opcode();
657 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
658 switch (arch_opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000659 case kArchCallCodeObject: {
660 EnsureSpaceForLazyDeopt();
661 if (HasImmediateInput(instr, 0)) {
662 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
663 __ Call(code, RelocInfo::CODE_TARGET);
664 } else {
665 Register reg = i.InputRegister(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000666 __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
667 __ call(reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000668 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000669 RecordCallPosition(instr);
670 frame_access_state()->ClearSPDelta();
671 break;
672 }
Ben Murdochda12d292016-06-02 14:46:10 +0100673 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000674 case kArchTailCallCodeObject: {
675 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
676 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100677 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
678 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
679 i.TempRegister(0), i.TempRegister(1),
680 i.TempRegister(2));
681 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000682 if (HasImmediateInput(instr, 0)) {
683 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
684 __ jmp(code, RelocInfo::CODE_TARGET);
685 } else {
686 Register reg = i.InputRegister(0);
687 __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
688 __ jmp(reg);
689 }
690 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000691 break;
692 }
Ben Murdochc5610432016-08-08 18:44:38 +0100693 case kArchTailCallAddress: {
694 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
695 AssembleDeconstructActivationRecord(stack_param_delta);
696 CHECK(!HasImmediateInput(instr, 0));
697 Register reg = i.InputRegister(0);
698 __ jmp(reg);
699 frame_access_state()->ClearSPDelta();
700 break;
701 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000702 case kArchCallJSFunction: {
703 EnsureSpaceForLazyDeopt();
704 Register func = i.InputRegister(0);
705 if (FLAG_debug_code) {
706 // Check the function's context matches the context argument.
707 __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
708 __ Assert(equal, kWrongFunctionContext);
709 }
710 __ Call(FieldOperand(func, JSFunction::kCodeEntryOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000711 frame_access_state()->ClearSPDelta();
712 RecordCallPosition(instr);
713 break;
714 }
Ben Murdochda12d292016-06-02 14:46:10 +0100715 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000716 case kArchTailCallJSFunction: {
717 Register func = i.InputRegister(0);
718 if (FLAG_debug_code) {
719 // Check the function's context matches the context argument.
720 __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
721 __ Assert(equal, kWrongFunctionContext);
722 }
723 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
724 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100725 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
726 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
727 i.TempRegister(0), i.TempRegister(1),
728 i.TempRegister(2));
729 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000730 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
731 frame_access_state()->ClearSPDelta();
732 break;
733 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000734 case kArchPrepareCallCFunction: {
735 // Frame alignment requires using FP-relative frame addressing.
736 frame_access_state()->SetFrameAccessToFP();
737 int const num_parameters = MiscField::decode(instr->opcode());
738 __ PrepareCallCFunction(num_parameters);
739 break;
740 }
741 case kArchPrepareTailCall:
742 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
743 break;
744 case kArchCallCFunction: {
745 int const num_parameters = MiscField::decode(instr->opcode());
746 if (HasImmediateInput(instr, 0)) {
747 ExternalReference ref = i.InputExternalReference(0);
748 __ CallCFunction(ref, num_parameters);
749 } else {
750 Register func = i.InputRegister(0);
751 __ CallCFunction(func, num_parameters);
752 }
753 frame_access_state()->SetFrameAccessToDefault();
754 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000755 break;
756 }
757 case kArchJmp:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400758 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000759 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000760 case kArchLookupSwitch:
761 AssembleArchLookupSwitch(instr);
762 break;
763 case kArchTableSwitch:
764 AssembleArchTableSwitch(instr);
765 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000766 case kArchNop:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000767 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000768 // don't emit code for nops.
769 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000770 case kArchDeoptimize: {
771 int deopt_state_id =
772 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
773 Deoptimizer::BailoutType bailout_type =
774 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
Ben Murdochc5610432016-08-08 18:44:38 +0100775 CodeGenResult result =
776 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
777 if (result != kSuccess) return result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000778 break;
779 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000780 case kArchRet:
781 AssembleReturn();
782 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400783 case kArchStackPointer:
784 __ movq(i.OutputRegister(), rsp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000785 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000786 case kArchFramePointer:
787 __ movq(i.OutputRegister(), rbp);
788 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100789 case kArchParentFramePointer:
Ben Murdochda12d292016-06-02 14:46:10 +0100790 if (frame_access_state()->has_frame()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100791 __ movq(i.OutputRegister(), Operand(rbp, 0));
792 } else {
793 __ movq(i.OutputRegister(), rbp);
794 }
795 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400796 case kArchTruncateDoubleToI: {
797 auto result = i.OutputRegister();
798 auto input = i.InputDoubleRegister(0);
799 auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input);
Ben Murdochc5610432016-08-08 18:44:38 +0100800 // We use Cvttsd2siq instead of Cvttsd2si due to performance reasons. The
801 // use of Cvttsd2siq requires the movl below to avoid sign extension.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000802 __ Cvttsd2siq(result, input);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400803 __ cmpq(result, Immediate(1));
804 __ j(overflow, ool->entry());
805 __ bind(ool->exit());
Ben Murdochc5610432016-08-08 18:44:38 +0100806 __ movl(result, result);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400807 break;
808 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000809 case kArchStoreWithWriteBarrier: {
810 RecordWriteMode mode =
811 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
812 Register object = i.InputRegister(0);
813 size_t index = 0;
814 Operand operand = i.MemoryOperand(&index);
815 Register value = i.InputRegister(index);
816 Register scratch0 = i.TempRegister(0);
817 Register scratch1 = i.TempRegister(1);
818 auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
819 scratch0, scratch1, mode);
820 __ movp(operand, value);
821 __ CheckPageFlag(object, scratch0,
822 MemoryChunk::kPointersFromHereAreInterestingMask,
823 not_zero, ool->entry());
824 __ bind(ool->exit());
825 break;
826 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100827 case kArchStackSlot: {
828 FrameOffset offset =
829 frame_access_state()->GetFrameOffset(i.InputInt32(0));
830 Register base;
831 if (offset.from_stack_pointer()) {
832 base = rsp;
833 } else {
834 base = rbp;
835 }
836 __ leaq(i.OutputRegister(), Operand(base, offset.offset()));
837 break;
838 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000839 case kX64Add32:
840 ASSEMBLE_BINOP(addl);
841 break;
842 case kX64Add:
843 ASSEMBLE_BINOP(addq);
844 break;
845 case kX64Sub32:
846 ASSEMBLE_BINOP(subl);
847 break;
848 case kX64Sub:
849 ASSEMBLE_BINOP(subq);
850 break;
851 case kX64And32:
852 ASSEMBLE_BINOP(andl);
853 break;
854 case kX64And:
855 ASSEMBLE_BINOP(andq);
856 break;
Ben Murdochda12d292016-06-02 14:46:10 +0100857 case kX64Cmp8:
858 ASSEMBLE_COMPARE(cmpb);
859 break;
860 case kX64Cmp16:
861 ASSEMBLE_COMPARE(cmpw);
862 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000863 case kX64Cmp32:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100864 ASSEMBLE_COMPARE(cmpl);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000865 break;
866 case kX64Cmp:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100867 ASSEMBLE_COMPARE(cmpq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000868 break;
Ben Murdochda12d292016-06-02 14:46:10 +0100869 case kX64Test8:
870 ASSEMBLE_COMPARE(testb);
871 break;
872 case kX64Test16:
873 ASSEMBLE_COMPARE(testw);
874 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000875 case kX64Test32:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100876 ASSEMBLE_COMPARE(testl);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000877 break;
878 case kX64Test:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100879 ASSEMBLE_COMPARE(testq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000880 break;
881 case kX64Imul32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400882 ASSEMBLE_MULT(imull);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000883 break;
884 case kX64Imul:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400885 ASSEMBLE_MULT(imulq);
886 break;
887 case kX64ImulHigh32:
888 if (instr->InputAt(1)->IsRegister()) {
889 __ imull(i.InputRegister(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000890 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400891 __ imull(i.InputOperand(1));
892 }
893 break;
894 case kX64UmulHigh32:
895 if (instr->InputAt(1)->IsRegister()) {
896 __ mull(i.InputRegister(1));
897 } else {
898 __ mull(i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000899 }
900 break;
901 case kX64Idiv32:
902 __ cdq();
903 __ idivl(i.InputRegister(1));
904 break;
905 case kX64Idiv:
906 __ cqo();
907 __ idivq(i.InputRegister(1));
908 break;
909 case kX64Udiv32:
910 __ xorl(rdx, rdx);
911 __ divl(i.InputRegister(1));
912 break;
913 case kX64Udiv:
914 __ xorq(rdx, rdx);
915 __ divq(i.InputRegister(1));
916 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400917 case kX64Not:
918 ASSEMBLE_UNOP(notq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000919 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400920 case kX64Not32:
921 ASSEMBLE_UNOP(notl);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000922 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400923 case kX64Neg:
924 ASSEMBLE_UNOP(negq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000925 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400926 case kX64Neg32:
927 ASSEMBLE_UNOP(negl);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000928 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 case kX64Or32:
930 ASSEMBLE_BINOP(orl);
931 break;
932 case kX64Or:
933 ASSEMBLE_BINOP(orq);
934 break;
935 case kX64Xor32:
936 ASSEMBLE_BINOP(xorl);
937 break;
938 case kX64Xor:
939 ASSEMBLE_BINOP(xorq);
940 break;
941 case kX64Shl32:
942 ASSEMBLE_SHIFT(shll, 5);
943 break;
944 case kX64Shl:
945 ASSEMBLE_SHIFT(shlq, 6);
946 break;
947 case kX64Shr32:
948 ASSEMBLE_SHIFT(shrl, 5);
949 break;
950 case kX64Shr:
951 ASSEMBLE_SHIFT(shrq, 6);
952 break;
953 case kX64Sar32:
954 ASSEMBLE_SHIFT(sarl, 5);
955 break;
956 case kX64Sar:
957 ASSEMBLE_SHIFT(sarq, 6);
958 break;
959 case kX64Ror32:
960 ASSEMBLE_SHIFT(rorl, 5);
961 break;
962 case kX64Ror:
963 ASSEMBLE_SHIFT(rorq, 6);
964 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000965 case kX64Lzcnt:
966 if (instr->InputAt(0)->IsRegister()) {
967 __ Lzcntq(i.OutputRegister(), i.InputRegister(0));
968 } else {
969 __ Lzcntq(i.OutputRegister(), i.InputOperand(0));
970 }
971 break;
972 case kX64Lzcnt32:
973 if (instr->InputAt(0)->IsRegister()) {
974 __ Lzcntl(i.OutputRegister(), i.InputRegister(0));
975 } else {
976 __ Lzcntl(i.OutputRegister(), i.InputOperand(0));
977 }
978 break;
979 case kX64Tzcnt:
980 if (instr->InputAt(0)->IsRegister()) {
981 __ Tzcntq(i.OutputRegister(), i.InputRegister(0));
982 } else {
983 __ Tzcntq(i.OutputRegister(), i.InputOperand(0));
984 }
985 break;
986 case kX64Tzcnt32:
987 if (instr->InputAt(0)->IsRegister()) {
988 __ Tzcntl(i.OutputRegister(), i.InputRegister(0));
989 } else {
990 __ Tzcntl(i.OutputRegister(), i.InputOperand(0));
991 }
992 break;
993 case kX64Popcnt:
994 if (instr->InputAt(0)->IsRegister()) {
995 __ Popcntq(i.OutputRegister(), i.InputRegister(0));
996 } else {
997 __ Popcntq(i.OutputRegister(), i.InputOperand(0));
998 }
999 break;
1000 case kX64Popcnt32:
1001 if (instr->InputAt(0)->IsRegister()) {
1002 __ Popcntl(i.OutputRegister(), i.InputRegister(0));
1003 } else {
1004 __ Popcntl(i.OutputRegister(), i.InputOperand(0));
1005 }
1006 break;
1007 case kSSEFloat32Cmp:
1008 ASSEMBLE_SSE_BINOP(Ucomiss);
1009 break;
1010 case kSSEFloat32Add:
1011 ASSEMBLE_SSE_BINOP(addss);
1012 break;
1013 case kSSEFloat32Sub:
1014 ASSEMBLE_SSE_BINOP(subss);
1015 break;
1016 case kSSEFloat32Mul:
1017 ASSEMBLE_SSE_BINOP(mulss);
1018 break;
1019 case kSSEFloat32Div:
1020 ASSEMBLE_SSE_BINOP(divss);
1021 // Don't delete this mov. It may improve performance on some CPUs,
1022 // when there is a (v)mulss depending on the result.
1023 __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
1024 break;
1025 case kSSEFloat32Abs: {
1026 // TODO(bmeurer): Use RIP relative 128-bit constants.
1027 __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
1028 __ psrlq(kScratchDoubleReg, 33);
1029 __ andps(i.OutputDoubleRegister(), kScratchDoubleReg);
1030 break;
1031 }
1032 case kSSEFloat32Neg: {
1033 // TODO(bmeurer): Use RIP relative 128-bit constants.
1034 __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
1035 __ psllq(kScratchDoubleReg, 31);
1036 __ xorps(i.OutputDoubleRegister(), kScratchDoubleReg);
1037 break;
1038 }
1039 case kSSEFloat32Sqrt:
1040 ASSEMBLE_SSE_UNOP(sqrtss);
1041 break;
1042 case kSSEFloat32Max:
1043 ASSEMBLE_SSE_BINOP(maxss);
1044 break;
1045 case kSSEFloat32Min:
1046 ASSEMBLE_SSE_BINOP(minss);
1047 break;
1048 case kSSEFloat32ToFloat64:
1049 ASSEMBLE_SSE_UNOP(Cvtss2sd);
1050 break;
1051 case kSSEFloat32Round: {
1052 CpuFeatureScope sse_scope(masm(), SSE4_1);
1053 RoundingMode const mode =
1054 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1055 __ Roundss(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
1056 break;
1057 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001058 case kSSEFloat32ToInt32:
Ben Murdochc5610432016-08-08 18:44:38 +01001059 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001060 __ Cvttss2si(i.OutputRegister(), i.InputDoubleRegister(0));
1061 } else {
1062 __ Cvttss2si(i.OutputRegister(), i.InputOperand(0));
1063 }
1064 break;
1065 case kSSEFloat32ToUint32: {
Ben Murdochc5610432016-08-08 18:44:38 +01001066 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001067 __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
1068 } else {
1069 __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
1070 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001071 break;
1072 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001073 case kSSEFloat64Cmp:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001074 ASSEMBLE_SSE_BINOP(Ucomisd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001075 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001076 case kSSEFloat64Add:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001077 ASSEMBLE_SSE_BINOP(addsd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001078 break;
1079 case kSSEFloat64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001080 ASSEMBLE_SSE_BINOP(subsd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001081 break;
1082 case kSSEFloat64Mul:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001083 ASSEMBLE_SSE_BINOP(mulsd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001084 break;
1085 case kSSEFloat64Div:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001086 ASSEMBLE_SSE_BINOP(divsd);
1087 // Don't delete this mov. It may improve performance on some CPUs,
1088 // when there is a (v)mulsd depending on the result.
1089 __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001090 break;
1091 case kSSEFloat64Mod: {
1092 __ subq(rsp, Immediate(kDoubleSize));
1093 // Move values to st(0) and st(1).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001094 __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001095 __ fld_d(Operand(rsp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001096 __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001097 __ fld_d(Operand(rsp, 0));
1098 // Loop while fprem isn't done.
1099 Label mod_loop;
1100 __ bind(&mod_loop);
1101 // This instructions traps on all kinds inputs, but we are assuming the
1102 // floating point control word is set to ignore them all.
1103 __ fprem();
1104 // The following 2 instruction implicitly use rax.
1105 __ fnstsw_ax();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001106 if (CpuFeatures::IsSupported(SAHF)) {
1107 CpuFeatureScope sahf_scope(masm(), SAHF);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001108 __ sahf();
1109 } else {
1110 __ shrl(rax, Immediate(8));
1111 __ andl(rax, Immediate(0xFF));
1112 __ pushq(rax);
1113 __ popfq();
1114 }
1115 __ j(parity_even, &mod_loop);
1116 // Move output to stack and clean up.
1117 __ fstp(1);
1118 __ fstp_d(Operand(rsp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001119 __ Movsd(i.OutputDoubleRegister(), Operand(rsp, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001120 __ addq(rsp, Immediate(kDoubleSize));
1121 break;
1122 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001123 case kSSEFloat64Max:
1124 ASSEMBLE_SSE_BINOP(maxsd);
1125 break;
1126 case kSSEFloat64Min:
1127 ASSEMBLE_SSE_BINOP(minsd);
1128 break;
1129 case kSSEFloat64Abs: {
1130 // TODO(bmeurer): Use RIP relative 128-bit constants.
1131 __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
1132 __ psrlq(kScratchDoubleReg, 1);
1133 __ andpd(i.OutputDoubleRegister(), kScratchDoubleReg);
1134 break;
1135 }
1136 case kSSEFloat64Neg: {
1137 // TODO(bmeurer): Use RIP relative 128-bit constants.
1138 __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
1139 __ psllq(kScratchDoubleReg, 63);
1140 __ xorpd(i.OutputDoubleRegister(), kScratchDoubleReg);
1141 break;
1142 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001143 case kSSEFloat64Sqrt:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001144 ASSEMBLE_SSE_UNOP(sqrtsd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001145 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001146 case kSSEFloat64Round: {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001147 CpuFeatureScope sse_scope(masm(), SSE4_1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001148 RoundingMode const mode =
1149 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1150 __ Roundsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001151 break;
1152 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001153 case kSSEFloat64ToFloat32:
1154 ASSEMBLE_SSE_UNOP(Cvtsd2ss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001155 break;
1156 case kSSEFloat64ToInt32:
Ben Murdochc5610432016-08-08 18:44:38 +01001157 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001158 __ Cvttsd2si(i.OutputRegister(), i.InputDoubleRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001159 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001160 __ Cvttsd2si(i.OutputRegister(), i.InputOperand(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001161 }
1162 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001163 case kSSEFloat64ToUint32: {
Ben Murdochc5610432016-08-08 18:44:38 +01001164 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001165 __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001166 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001167 __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001168 }
Ben Murdochda12d292016-06-02 14:46:10 +01001169 if (MiscField::decode(instr->opcode())) {
1170 __ AssertZeroExtended(i.OutputRegister());
1171 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001172 break;
1173 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001174 case kSSEFloat32ToInt64:
Ben Murdochc5610432016-08-08 18:44:38 +01001175 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001176 __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
1177 } else {
1178 __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
1179 }
1180 if (instr->OutputCount() > 1) {
1181 __ Set(i.OutputRegister(1), 1);
1182 Label done;
1183 Label fail;
1184 __ Move(kScratchDoubleReg, static_cast<float>(INT64_MIN));
Ben Murdochc5610432016-08-08 18:44:38 +01001185 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001186 __ Ucomiss(kScratchDoubleReg, i.InputDoubleRegister(0));
1187 } else {
1188 __ Ucomiss(kScratchDoubleReg, i.InputOperand(0));
1189 }
1190 // If the input is NaN, then the conversion fails.
1191 __ j(parity_even, &fail);
1192 // If the input is INT64_MIN, then the conversion succeeds.
1193 __ j(equal, &done);
1194 __ cmpq(i.OutputRegister(0), Immediate(1));
1195 // If the conversion results in INT64_MIN, but the input was not
1196 // INT64_MIN, then the conversion fails.
1197 __ j(no_overflow, &done);
1198 __ bind(&fail);
1199 __ Set(i.OutputRegister(1), 0);
1200 __ bind(&done);
1201 }
1202 break;
1203 case kSSEFloat64ToInt64:
Ben Murdochc5610432016-08-08 18:44:38 +01001204 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001205 __ Cvttsd2siq(i.OutputRegister(0), i.InputDoubleRegister(0));
1206 } else {
1207 __ Cvttsd2siq(i.OutputRegister(0), i.InputOperand(0));
1208 }
1209 if (instr->OutputCount() > 1) {
1210 __ Set(i.OutputRegister(1), 1);
1211 Label done;
1212 Label fail;
1213 __ Move(kScratchDoubleReg, static_cast<double>(INT64_MIN));
Ben Murdochc5610432016-08-08 18:44:38 +01001214 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001215 __ Ucomisd(kScratchDoubleReg, i.InputDoubleRegister(0));
1216 } else {
1217 __ Ucomisd(kScratchDoubleReg, i.InputOperand(0));
1218 }
1219 // If the input is NaN, then the conversion fails.
1220 __ j(parity_even, &fail);
1221 // If the input is INT64_MIN, then the conversion succeeds.
1222 __ j(equal, &done);
1223 __ cmpq(i.OutputRegister(0), Immediate(1));
1224 // If the conversion results in INT64_MIN, but the input was not
1225 // INT64_MIN, then the conversion fails.
1226 __ j(no_overflow, &done);
1227 __ bind(&fail);
1228 __ Set(i.OutputRegister(1), 0);
1229 __ bind(&done);
1230 }
1231 break;
1232 case kSSEFloat32ToUint64: {
1233 Label done;
1234 Label success;
1235 if (instr->OutputCount() > 1) {
1236 __ Set(i.OutputRegister(1), 0);
1237 }
1238 // There does not exist a Float32ToUint64 instruction, so we have to use
1239 // the Float32ToInt64 instruction.
Ben Murdochc5610432016-08-08 18:44:38 +01001240 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001241 __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
1242 } else {
1243 __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
1244 }
1245 // Check if the result of the Float32ToInt64 conversion is positive, we
1246 // are already done.
1247 __ testq(i.OutputRegister(), i.OutputRegister());
1248 __ j(positive, &success);
1249 // The result of the first conversion was negative, which means that the
1250 // input value was not within the positive int64 range. We subtract 2^64
1251 // and convert it again to see if it is within the uint64 range.
1252 __ Move(kScratchDoubleReg, -9223372036854775808.0f);
Ben Murdochc5610432016-08-08 18:44:38 +01001253 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001254 __ addss(kScratchDoubleReg, i.InputDoubleRegister(0));
1255 } else {
1256 __ addss(kScratchDoubleReg, i.InputOperand(0));
1257 }
1258 __ Cvttss2siq(i.OutputRegister(), kScratchDoubleReg);
1259 __ testq(i.OutputRegister(), i.OutputRegister());
1260 // The only possible negative value here is 0x80000000000000000, which is
1261 // used on x64 to indicate an integer overflow.
1262 __ j(negative, &done);
1263 // The input value is within uint64 range and the second conversion worked
1264 // successfully, but we still have to undo the subtraction we did
1265 // earlier.
1266 __ Set(kScratchRegister, 0x8000000000000000);
1267 __ orq(i.OutputRegister(), kScratchRegister);
1268 __ bind(&success);
1269 if (instr->OutputCount() > 1) {
1270 __ Set(i.OutputRegister(1), 1);
1271 }
1272 __ bind(&done);
1273 break;
1274 }
1275 case kSSEFloat64ToUint64: {
1276 Label done;
1277 Label success;
1278 if (instr->OutputCount() > 1) {
1279 __ Set(i.OutputRegister(1), 0);
1280 }
1281 // There does not exist a Float64ToUint64 instruction, so we have to use
1282 // the Float64ToInt64 instruction.
Ben Murdochc5610432016-08-08 18:44:38 +01001283 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001284 __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
1285 } else {
1286 __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
1287 }
1288 // Check if the result of the Float64ToInt64 conversion is positive, we
1289 // are already done.
1290 __ testq(i.OutputRegister(), i.OutputRegister());
1291 __ j(positive, &success);
1292 // The result of the first conversion was negative, which means that the
1293 // input value was not within the positive int64 range. We subtract 2^64
1294 // and convert it again to see if it is within the uint64 range.
1295 __ Move(kScratchDoubleReg, -9223372036854775808.0);
Ben Murdochc5610432016-08-08 18:44:38 +01001296 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001297 __ addsd(kScratchDoubleReg, i.InputDoubleRegister(0));
1298 } else {
1299 __ addsd(kScratchDoubleReg, i.InputOperand(0));
1300 }
1301 __ Cvttsd2siq(i.OutputRegister(), kScratchDoubleReg);
1302 __ testq(i.OutputRegister(), i.OutputRegister());
1303 // The only possible negative value here is 0x80000000000000000, which is
1304 // used on x64 to indicate an integer overflow.
1305 __ j(negative, &done);
1306 // The input value is within uint64 range and the second conversion worked
1307 // successfully, but we still have to undo the subtraction we did
1308 // earlier.
1309 __ Set(kScratchRegister, 0x8000000000000000);
1310 __ orq(i.OutputRegister(), kScratchRegister);
1311 __ bind(&success);
1312 if (instr->OutputCount() > 1) {
1313 __ Set(i.OutputRegister(1), 1);
1314 }
1315 __ bind(&done);
1316 break;
1317 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001318 case kSSEInt32ToFloat64:
1319 if (instr->InputAt(0)->IsRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001320 __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001321 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001322 __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001323 }
1324 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001325 case kSSEInt32ToFloat32:
1326 if (instr->InputAt(0)->IsRegister()) {
1327 __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
1328 } else {
1329 __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
1330 }
1331 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001332 case kSSEInt64ToFloat32:
1333 if (instr->InputAt(0)->IsRegister()) {
1334 __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
1335 } else {
1336 __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
1337 }
1338 break;
1339 case kSSEInt64ToFloat64:
1340 if (instr->InputAt(0)->IsRegister()) {
1341 __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
1342 } else {
1343 __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
1344 }
1345 break;
1346 case kSSEUint64ToFloat32:
1347 if (instr->InputAt(0)->IsRegister()) {
1348 __ movq(kScratchRegister, i.InputRegister(0));
1349 } else {
1350 __ movq(kScratchRegister, i.InputOperand(0));
1351 }
1352 __ Cvtqui2ss(i.OutputDoubleRegister(), kScratchRegister,
1353 i.TempRegister(0));
1354 break;
1355 case kSSEUint64ToFloat64:
1356 if (instr->InputAt(0)->IsRegister()) {
1357 __ movq(kScratchRegister, i.InputRegister(0));
1358 } else {
1359 __ movq(kScratchRegister, i.InputOperand(0));
1360 }
1361 __ Cvtqui2sd(i.OutputDoubleRegister(), kScratchRegister,
1362 i.TempRegister(0));
1363 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001364 case kSSEUint32ToFloat64:
1365 if (instr->InputAt(0)->IsRegister()) {
1366 __ movl(kScratchRegister, i.InputRegister(0));
1367 } else {
1368 __ movl(kScratchRegister, i.InputOperand(0));
1369 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001370 __ Cvtqsi2sd(i.OutputDoubleRegister(), kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001371 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001372 case kSSEUint32ToFloat32:
1373 if (instr->InputAt(0)->IsRegister()) {
1374 __ movl(kScratchRegister, i.InputRegister(0));
1375 } else {
1376 __ movl(kScratchRegister, i.InputOperand(0));
1377 }
1378 __ Cvtqsi2ss(i.OutputDoubleRegister(), kScratchRegister);
1379 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001380 case kSSEFloat64ExtractLowWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001381 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001382 __ movl(i.OutputRegister(), i.InputOperand(0));
1383 } else {
1384 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
1385 }
1386 break;
1387 case kSSEFloat64ExtractHighWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001388 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001389 __ movl(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
1390 } else {
1391 __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1);
1392 }
1393 break;
1394 case kSSEFloat64InsertLowWord32:
1395 if (instr->InputAt(1)->IsRegister()) {
1396 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 0);
1397 } else {
1398 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0);
1399 }
1400 break;
1401 case kSSEFloat64InsertHighWord32:
1402 if (instr->InputAt(1)->IsRegister()) {
1403 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 1);
1404 } else {
1405 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1);
1406 }
1407 break;
1408 case kSSEFloat64LoadLowWord32:
1409 if (instr->InputAt(0)->IsRegister()) {
1410 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
1411 } else {
1412 __ Movd(i.OutputDoubleRegister(), i.InputOperand(0));
1413 }
1414 break;
1415 case kAVXFloat32Cmp: {
1416 CpuFeatureScope avx_scope(masm(), AVX);
Ben Murdochc5610432016-08-08 18:44:38 +01001417 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001418 __ vucomiss(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1419 } else {
1420 __ vucomiss(i.InputDoubleRegister(0), i.InputOperand(1));
1421 }
1422 break;
1423 }
1424 case kAVXFloat32Add:
1425 ASSEMBLE_AVX_BINOP(vaddss);
1426 break;
1427 case kAVXFloat32Sub:
1428 ASSEMBLE_AVX_BINOP(vsubss);
1429 break;
1430 case kAVXFloat32Mul:
1431 ASSEMBLE_AVX_BINOP(vmulss);
1432 break;
1433 case kAVXFloat32Div:
1434 ASSEMBLE_AVX_BINOP(vdivss);
1435 // Don't delete this mov. It may improve performance on some CPUs,
1436 // when there is a (v)mulss depending on the result.
1437 __ Movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
1438 break;
1439 case kAVXFloat32Max:
1440 ASSEMBLE_AVX_BINOP(vmaxss);
1441 break;
1442 case kAVXFloat32Min:
1443 ASSEMBLE_AVX_BINOP(vminss);
1444 break;
1445 case kAVXFloat64Cmp: {
1446 CpuFeatureScope avx_scope(masm(), AVX);
Ben Murdochc5610432016-08-08 18:44:38 +01001447 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001448 __ vucomisd(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1449 } else {
1450 __ vucomisd(i.InputDoubleRegister(0), i.InputOperand(1));
1451 }
1452 break;
1453 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001454 case kAVXFloat64Add:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001455 ASSEMBLE_AVX_BINOP(vaddsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001456 break;
1457 case kAVXFloat64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001458 ASSEMBLE_AVX_BINOP(vsubsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001459 break;
1460 case kAVXFloat64Mul:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001461 ASSEMBLE_AVX_BINOP(vmulsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001462 break;
1463 case kAVXFloat64Div:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001464 ASSEMBLE_AVX_BINOP(vdivsd);
1465 // Don't delete this mov. It may improve performance on some CPUs,
1466 // when there is a (v)mulsd depending on the result.
1467 __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001468 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001469 case kAVXFloat64Max:
1470 ASSEMBLE_AVX_BINOP(vmaxsd);
1471 break;
1472 case kAVXFloat64Min:
1473 ASSEMBLE_AVX_BINOP(vminsd);
1474 break;
1475 case kAVXFloat32Abs: {
1476 // TODO(bmeurer): Use RIP relative 128-bit constants.
1477 CpuFeatureScope avx_scope(masm(), AVX);
1478 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1479 __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 33);
Ben Murdochc5610432016-08-08 18:44:38 +01001480 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001481 __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
1482 i.InputDoubleRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001483 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001484 __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
1485 i.InputOperand(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001486 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001487 break;
1488 }
1489 case kAVXFloat32Neg: {
1490 // TODO(bmeurer): Use RIP relative 128-bit constants.
1491 CpuFeatureScope avx_scope(masm(), AVX);
1492 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1493 __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 31);
Ben Murdochc5610432016-08-08 18:44:38 +01001494 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001495 __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
1496 i.InputDoubleRegister(0));
1497 } else {
1498 __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
1499 i.InputOperand(0));
1500 }
1501 break;
1502 }
1503 case kAVXFloat64Abs: {
1504 // TODO(bmeurer): Use RIP relative 128-bit constants.
1505 CpuFeatureScope avx_scope(masm(), AVX);
1506 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1507 __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001508 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001509 __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1510 i.InputDoubleRegister(0));
1511 } else {
1512 __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1513 i.InputOperand(0));
1514 }
1515 break;
1516 }
1517 case kAVXFloat64Neg: {
1518 // TODO(bmeurer): Use RIP relative 128-bit constants.
1519 CpuFeatureScope avx_scope(masm(), AVX);
1520 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1521 __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 63);
Ben Murdochc5610432016-08-08 18:44:38 +01001522 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001523 __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1524 i.InputDoubleRegister(0));
1525 } else {
1526 __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1527 i.InputOperand(0));
1528 }
1529 break;
1530 }
1531 case kX64Movsxbl:
1532 ASSEMBLE_MOVX(movsxbl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001533 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001534 break;
1535 case kX64Movzxbl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001536 ASSEMBLE_MOVX(movzxbl);
1537 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001538 break;
1539 case kX64Movb: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001540 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001541 Operand operand = i.MemoryOperand(&index);
1542 if (HasImmediateInput(instr, index)) {
1543 __ movb(operand, Immediate(i.InputInt8(index)));
1544 } else {
1545 __ movb(operand, i.InputRegister(index));
1546 }
1547 break;
1548 }
1549 case kX64Movsxwl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550 ASSEMBLE_MOVX(movsxwl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001551 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001552 break;
1553 case kX64Movzxwl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001554 ASSEMBLE_MOVX(movzxwl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001555 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001556 break;
1557 case kX64Movw: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001559 Operand operand = i.MemoryOperand(&index);
1560 if (HasImmediateInput(instr, index)) {
1561 __ movw(operand, Immediate(i.InputInt16(index)));
1562 } else {
1563 __ movw(operand, i.InputRegister(index));
1564 }
1565 break;
1566 }
1567 case kX64Movl:
1568 if (instr->HasOutput()) {
1569 if (instr->addressing_mode() == kMode_None) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001570 if (instr->InputAt(0)->IsRegister()) {
1571 __ movl(i.OutputRegister(), i.InputRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001572 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001573 __ movl(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001574 }
1575 } else {
1576 __ movl(i.OutputRegister(), i.MemoryOperand());
1577 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001578 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001579 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001580 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001581 Operand operand = i.MemoryOperand(&index);
1582 if (HasImmediateInput(instr, index)) {
1583 __ movl(operand, i.InputImmediate(index));
1584 } else {
1585 __ movl(operand, i.InputRegister(index));
1586 }
1587 }
1588 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001589 case kX64Movsxlq:
1590 ASSEMBLE_MOVX(movsxlq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001591 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001592 case kX64Movq:
1593 if (instr->HasOutput()) {
1594 __ movq(i.OutputRegister(), i.MemoryOperand());
1595 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001596 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001597 Operand operand = i.MemoryOperand(&index);
1598 if (HasImmediateInput(instr, index)) {
1599 __ movq(operand, i.InputImmediate(index));
1600 } else {
1601 __ movq(operand, i.InputRegister(index));
1602 }
1603 }
1604 break;
1605 case kX64Movss:
1606 if (instr->HasOutput()) {
1607 __ movss(i.OutputDoubleRegister(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001608 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001609 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001610 Operand operand = i.MemoryOperand(&index);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001611 __ movss(operand, i.InputDoubleRegister(index));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001612 }
1613 break;
1614 case kX64Movsd:
1615 if (instr->HasOutput()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001616 __ Movsd(i.OutputDoubleRegister(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001617 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001618 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001619 Operand operand = i.MemoryOperand(&index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001620 __ Movsd(operand, i.InputDoubleRegister(index));
1621 }
1622 break;
1623 case kX64BitcastFI:
Ben Murdochc5610432016-08-08 18:44:38 +01001624 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001625 __ movl(i.OutputRegister(), i.InputOperand(0));
1626 } else {
1627 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
1628 }
1629 break;
1630 case kX64BitcastDL:
Ben Murdochc5610432016-08-08 18:44:38 +01001631 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 __ movq(i.OutputRegister(), i.InputOperand(0));
1633 } else {
1634 __ Movq(i.OutputRegister(), i.InputDoubleRegister(0));
1635 }
1636 break;
1637 case kX64BitcastIF:
1638 if (instr->InputAt(0)->IsRegister()) {
1639 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
1640 } else {
1641 __ movss(i.OutputDoubleRegister(), i.InputOperand(0));
1642 }
1643 break;
1644 case kX64BitcastLD:
1645 if (instr->InputAt(0)->IsRegister()) {
1646 __ Movq(i.OutputDoubleRegister(), i.InputRegister(0));
1647 } else {
1648 __ Movsd(i.OutputDoubleRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649 }
1650 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001651 case kX64Lea32: {
1652 AddressingMode mode = AddressingModeField::decode(instr->opcode());
1653 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
1654 // and addressing mode just happens to work out. The "addl"/"subl" forms
1655 // in these cases are faster based on measurements.
1656 if (i.InputRegister(0).is(i.OutputRegister())) {
1657 if (mode == kMode_MRI) {
1658 int32_t constant_summand = i.InputInt32(1);
1659 if (constant_summand > 0) {
1660 __ addl(i.OutputRegister(), Immediate(constant_summand));
1661 } else if (constant_summand < 0) {
1662 __ subl(i.OutputRegister(), Immediate(-constant_summand));
1663 }
1664 } else if (mode == kMode_MR1) {
1665 if (i.InputRegister(1).is(i.OutputRegister())) {
1666 __ shll(i.OutputRegister(), Immediate(1));
1667 } else {
1668 __ leal(i.OutputRegister(), i.MemoryOperand());
1669 }
1670 } else if (mode == kMode_M2) {
1671 __ shll(i.OutputRegister(), Immediate(1));
1672 } else if (mode == kMode_M4) {
1673 __ shll(i.OutputRegister(), Immediate(2));
1674 } else if (mode == kMode_M8) {
1675 __ shll(i.OutputRegister(), Immediate(3));
1676 } else {
1677 __ leal(i.OutputRegister(), i.MemoryOperand());
1678 }
1679 } else {
1680 __ leal(i.OutputRegister(), i.MemoryOperand());
1681 }
1682 __ AssertZeroExtended(i.OutputRegister());
1683 break;
1684 }
1685 case kX64Lea:
1686 __ leaq(i.OutputRegister(), i.MemoryOperand());
1687 break;
1688 case kX64Dec32:
1689 __ decl(i.OutputRegister());
1690 break;
1691 case kX64Inc32:
1692 __ incl(i.OutputRegister());
1693 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001694 case kX64Push:
1695 if (HasImmediateInput(instr, 0)) {
1696 __ pushq(i.InputImmediate(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001697 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001698 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001699 if (instr->InputAt(0)->IsRegister()) {
1700 __ pushq(i.InputRegister(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001701 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochc5610432016-08-08 18:44:38 +01001702 } else if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001703 // TODO(titzer): use another machine instruction?
1704 __ subq(rsp, Immediate(kDoubleSize));
1705 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1706 __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001707 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001708 __ pushq(i.InputOperand(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001709 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001710 }
1711 }
1712 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001713 case kX64Poke: {
1714 int const slot = MiscField::decode(instr->opcode());
1715 if (HasImmediateInput(instr, 0)) {
1716 __ movq(Operand(rsp, slot * kPointerSize), i.InputImmediate(0));
1717 } else {
1718 __ movq(Operand(rsp, slot * kPointerSize), i.InputRegister(0));
1719 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001720 break;
1721 }
Ben Murdochc5610432016-08-08 18:44:38 +01001722 case kX64Xchgb: {
1723 size_t index = 0;
1724 Operand operand = i.MemoryOperand(&index);
1725 __ xchgb(i.InputRegister(index), operand);
1726 break;
1727 }
1728 case kX64Xchgw: {
1729 size_t index = 0;
1730 Operand operand = i.MemoryOperand(&index);
1731 __ xchgw(i.InputRegister(index), operand);
1732 break;
1733 }
1734 case kX64Xchgl: {
1735 size_t index = 0;
1736 Operand operand = i.MemoryOperand(&index);
1737 __ xchgl(i.InputRegister(index), operand);
1738 break;
1739 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001740 case kCheckedLoadInt8:
1741 ASSEMBLE_CHECKED_LOAD_INTEGER(movsxbl);
1742 break;
1743 case kCheckedLoadUint8:
1744 ASSEMBLE_CHECKED_LOAD_INTEGER(movzxbl);
1745 break;
1746 case kCheckedLoadInt16:
1747 ASSEMBLE_CHECKED_LOAD_INTEGER(movsxwl);
1748 break;
1749 case kCheckedLoadUint16:
1750 ASSEMBLE_CHECKED_LOAD_INTEGER(movzxwl);
1751 break;
1752 case kCheckedLoadWord32:
1753 ASSEMBLE_CHECKED_LOAD_INTEGER(movl);
1754 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001755 case kCheckedLoadWord64:
1756 ASSEMBLE_CHECKED_LOAD_INTEGER(movq);
1757 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001758 case kCheckedLoadFloat32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001759 ASSEMBLE_CHECKED_LOAD_FLOAT(Movss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001760 break;
1761 case kCheckedLoadFloat64:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001762 ASSEMBLE_CHECKED_LOAD_FLOAT(Movsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001763 break;
1764 case kCheckedStoreWord8:
1765 ASSEMBLE_CHECKED_STORE_INTEGER(movb);
1766 break;
1767 case kCheckedStoreWord16:
1768 ASSEMBLE_CHECKED_STORE_INTEGER(movw);
1769 break;
1770 case kCheckedStoreWord32:
1771 ASSEMBLE_CHECKED_STORE_INTEGER(movl);
1772 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001773 case kCheckedStoreWord64:
1774 ASSEMBLE_CHECKED_STORE_INTEGER(movq);
1775 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001776 case kCheckedStoreFloat32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001777 ASSEMBLE_CHECKED_STORE_FLOAT(Movss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001778 break;
1779 case kCheckedStoreFloat64:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001780 ASSEMBLE_CHECKED_STORE_FLOAT(Movsd);
1781 break;
1782 case kX64StackCheck:
1783 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001784 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001785 case kAtomicLoadInt8:
1786 case kAtomicLoadUint8:
1787 case kAtomicLoadInt16:
1788 case kAtomicLoadUint16:
1789 case kAtomicLoadWord32:
1790 case kAtomicStoreWord8:
1791 case kAtomicStoreWord16:
1792 case kAtomicStoreWord32:
1793 UNREACHABLE(); // Won't be generated by instruction selector.
1794 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001795 }
Ben Murdochc5610432016-08-08 18:44:38 +01001796 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001797} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001798
1799
1800// Assembles branches after this instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  X64OperandConverter i(this, instr);
  // When the false block immediately follows this one, the jump to it (if
  // emitted at all) can use the short near encoding.
  Label::Distance flabel_distance =
      branch->fallthru ? Label::kNear : Label::kFar;
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  // Emit a conditional jump to the true label. The kUnordered* variants first
  // dispatch on the parity flag, which x64 floating-point compares set when
  // either operand is NaN, and then deliberately fall through to the ordered
  // case below them.
  switch (branch->condition) {
    case kUnorderedEqual:
      // NaN operands: unordered-equal is false, so branch to the false label.
      __ j(parity_even, flabel, flabel_distance);
    // Fall through.
    case kEqual:
      __ j(equal, tlabel);
      break;
    case kUnorderedNotEqual:
      // NaN operands: unordered-not-equal is true, so branch to the true
      // label.
      __ j(parity_even, tlabel);
    // Fall through.
    case kNotEqual:
      __ j(not_equal, tlabel);
      break;
    case kSignedLessThan:
      __ j(less, tlabel);
      break;
    case kSignedGreaterThanOrEqual:
      __ j(greater_equal, tlabel);
      break;
    case kSignedLessThanOrEqual:
      __ j(less_equal, tlabel);
      break;
    case kSignedGreaterThan:
      __ j(greater, tlabel);
      break;
    case kUnsignedLessThan:
      __ j(below, tlabel);
      break;
    case kUnsignedGreaterThanOrEqual:
      __ j(above_equal, tlabel);
      break;
    case kUnsignedLessThanOrEqual:
      __ j(below_equal, tlabel);
      break;
    case kUnsignedGreaterThan:
      __ j(above, tlabel);
      break;
    case kOverflow:
      __ j(overflow, tlabel);
      break;
    case kNotOverflow:
      __ j(no_overflow, tlabel);
      break;
    default:
      UNREACHABLE();
      break;
  }
  // Jump to the false label unless it is the next block in assembly order.
  if (!branch->fallthru) __ jmp(flabel, flabel_distance);
}
1856
1857
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001858void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001859 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001860}
1861
1862
1863// Assembles boolean materializations after this instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  X64OperandConverter i(this, instr);
  Label done;

  // Materialize a full 64-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label check;
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = no_condition;
  // Map the flags condition onto an x64 condition code for the setcc below.
  // The kUnordered* cases first handle NaN inputs (parity flag set by the
  // preceding floating-point compare) by storing the known result directly,
  // then fall through to share the ordered case's condition code.
  switch (condition) {
    case kUnorderedEqual:
      // Unordered compare: equal is false, so store 0 and skip the setcc.
      __ j(parity_odd, &check, Label::kNear);
      __ movl(reg, Immediate(0));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kEqual:
      cc = equal;
      break;
    case kUnorderedNotEqual:
      // Unordered compare: not-equal is true, so store 1 and skip the setcc.
      __ j(parity_odd, &check, Label::kNear);
      __ movl(reg, Immediate(1));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kNotEqual:
      cc = not_equal;
      break;
    case kSignedLessThan:
      cc = less;
      break;
    case kSignedGreaterThanOrEqual:
      cc = greater_equal;
      break;
    case kSignedLessThanOrEqual:
      cc = less_equal;
      break;
    case kSignedGreaterThan:
      cc = greater;
      break;
    case kUnsignedLessThan:
      cc = below;
      break;
    case kUnsignedGreaterThanOrEqual:
      cc = above_equal;
      break;
    case kUnsignedLessThanOrEqual:
      cc = below_equal;
      break;
    case kUnsignedGreaterThan:
      cc = above;
      break;
    case kOverflow:
      cc = overflow;
      break;
    case kNotOverflow:
      cc = no_overflow;
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ bind(&check);
  __ setcc(cc, reg);
  // setcc writes only the low byte; zero-extend it to the full register.
  __ movzxbl(reg, reg);
  __ bind(&done);
}
1931
1932
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001933void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1934 X64OperandConverter i(this, instr);
1935 Register input = i.InputRegister(0);
1936 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1937 __ cmpl(input, Immediate(i.InputInt32(index + 0)));
1938 __ j(equal, GetLabel(i.InputRpo(index + 1)));
1939 }
1940 AssembleArchJump(i.InputRpo(1));
1941}
1942
1943
1944void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1945 X64OperandConverter i(this, instr);
1946 Register input = i.InputRegister(0);
1947 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
1948 Label** cases = zone()->NewArray<Label*>(case_count);
1949 for (int32_t index = 0; index < case_count; ++index) {
1950 cases[index] = GetLabel(i.InputRpo(index + 2));
1951 }
1952 Label* const table = AddJumpTable(cases, case_count);
1953 __ cmpl(input, Immediate(case_count));
1954 __ j(above_equal, GetLabel(i.InputRpo(1)));
1955 __ leaq(kScratchRegister, Operand(table));
1956 __ jmp(Operand(kScratchRegister, input, times_8, 0));
1957}
1958
Ben Murdochc5610432016-08-08 18:44:38 +01001959CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001960 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001961 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001962 isolate(), deoptimization_id, bailout_type);
Ben Murdochc5610432016-08-08 18:44:38 +01001963 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001964 __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochc5610432016-08-08 18:44:38 +01001965 return kSuccess;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001966}
1967
1968
namespace {

// Size in bytes of one saved XMM register slot in the prologue/epilogue
// below. The anonymous namespace already gives this internal linkage, so the
// previous `static` specifier was redundant.
const int kQuadWordSize = 16;

}  // namespace
1974
Ben Murdochc5610432016-08-08 18:44:38 +01001975void CodeGenerator::FinishFrame(Frame* frame) {
1976 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001977
Ben Murdochc5610432016-08-08 18:44:38 +01001978 const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
1979 if (saves_fp != 0) {
1980 frame->AlignSavedCalleeRegisterSlots();
1981 if (saves_fp != 0) { // Save callee-saved XMM registers.
1982 const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
1983 frame->AllocateSavedCalleeRegisterSlots(saves_fp_count *
1984 (kQuadWordSize / kPointerSize));
1985 }
1986 }
1987 const RegList saves = descriptor->CalleeSavedRegisters();
1988 if (saves != 0) { // Save callee-saved registers.
1989 int count = 0;
1990 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
1991 if (((1 << i) & saves)) {
1992 ++count;
1993 }
1994 }
1995 frame->AllocateSavedCalleeRegisterSlots(count);
1996 }
1997}
1998
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    // Emit the prologue that matches the kind of code being generated.
    if (descriptor->IsCFunctionCall()) {
      // Plain C frame: save the caller's frame pointer and establish ours.
      __ pushq(rbp);
      __ movq(rbp, rsp);
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      __ StubPrologue(info()->GetOutputStackFrameType());
    }
  }
  // Number of spill slots still to be allocated (adjusted below for OSR).
  int shrink_slots = frame()->GetSpillSlotCount();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= static_cast<int>(OsrHelper(info()).UnoptimizedFrameSlots());
  }

  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (shrink_slots > 0) {
    // Reserve the spill-slot area.
    __ subq(rsp, Immediate(shrink_slots * kPointerSize));
  }

  if (saves_fp != 0) {  // Save callee-saved XMM registers.
    const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
    const int stack_size = saves_fp_count * kQuadWordSize;
    // Adjust the stack pointer.
    __ subp(rsp, Immediate(stack_size));
    // Store the registers on the stack, one 16-byte slot per register, in
    // ascending register-code order.
    int slot_idx = 0;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      if (!((1 << i) & saves_fp)) continue;
      __ movdqu(Operand(rsp, kQuadWordSize * slot_idx),
                XMMRegister::from_code(i));
      slot_idx++;
    }
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {  // Save callee-saved registers.
    // Push in descending register-code order; AssembleReturn pops them back
    // in the matching ascending order.
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      __ pushq(Register::from_code(i));
    }
  }
}
2054
2055
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // Pop in ascending register-code order, mirroring the descending-order
    // pushes in AssembleConstructFrame.
    for (int i = 0; i < Register::kNumRegisters; i++) {
      if (!((1 << i) & saves)) continue;
      __ popq(Register::from_code(i));
    }
  }
  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
    const int stack_size = saves_fp_count * kQuadWordSize;
    // Load the registers from the stack, matching the slot layout written by
    // AssembleConstructFrame.
    int slot_idx = 0;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      if (!((1 << i) & saves_fp)) continue;
      __ movdqu(XMMRegister::from_code(i),
                Operand(rsp, kQuadWordSize * slot_idx));
      slot_idx++;
    }
    // Adjust the stack pointer.
    __ addp(rsp, Immediate(stack_size));
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      // A shared return site already exists; jump to it rather than emitting
      // the frame teardown again.
      __ jmp(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  // Return, popping the incoming stack parameters in the same instruction.
  size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
  // Might need rcx for scratch if pop_size is too big.
  DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
  __ Ret(static_cast<int>(pop_size), rcx);
}
2100
2101
// Emits code for a parallel-move element: copies `source` into `destination`.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ movq(g.ToRegister(destination), src);
    } else {
      __ movq(g.ToOperand(destination), src);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ movq(dst, src);
    } else {
      // Spill on demand to use a temporary register for memory-to-memory
      // moves.
      Register tmp = kScratchRegister;
      Operand dst = g.ToOperand(destination);
      __ movq(tmp, src);
      __ movq(dst, tmp);
    }
  } else if (source->IsConstant()) {
    ConstantOperand* constant_source = ConstantOperand::cast(source);
    Constant src = g.ToConstant(constant_source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize into the target register, or into the scratch register
      // first when the destination is a stack slot (stored at the end).
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : kScratchRegister;
      switch (src.type()) {
        case Constant::kInt32: {
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE) {
            // Wasm memory references need the full 64-bit form so the
            // embedded address can be patched later.
            __ movq(dst, src.ToInt64(), src.rmode());
          } else {
            // TODO(dcarney): don't need scratch in this case.
            int32_t value = src.ToInt32();
            if (value == 0) {
              // xor is a shorter encoding for loading zero.
              __ xorl(dst, dst);
            } else {
              __ movl(dst, Immediate(value));
            }
          }
          break;
        }
        case Constant::kInt64:
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE) {
            __ movq(dst, src.ToInt64(), src.rmode());
          } else {
            DCHECK(src.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
            __ Set(dst, src.ToInt64());
          }
          break;
        case Constant::kFloat32:
          // Float constants destined for GP registers/slots are boxed as
          // heap numbers.
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ Move(dst, src.ToExternalReference());
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          // Prefer cheaper sources (frame slot, root list) over embedding
          // the object handle when the same value is available there.
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ movp(dst, g.SlotToOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): load of labels on x64.
          break;
      }
      if (destination->IsStackSlot()) {
        // The constant was materialized into the scratch register above;
        // store it into the destination slot.
        __ movq(g.ToOperand(destination), kScratchRegister);
      }
    } else if (src.type() == Constant::kFloat32) {
      // TODO(turbofan): Can we do better here?
      uint32_t src_const = bit_cast<uint32_t>(src.ToFloat32());
      if (destination->IsFPRegister()) {
        __ Move(g.ToDoubleRegister(destination), src_const);
      } else {
        DCHECK(destination->IsFPStackSlot());
        Operand dst = g.ToOperand(destination);
        // A 32-bit float fits in an immediate store.
        __ movl(dst, Immediate(src_const));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      uint64_t src_const = bit_cast<uint64_t>(src.ToFloat64());
      if (destination->IsFPRegister()) {
        __ Move(g.ToDoubleRegister(destination), src_const);
      } else {
        DCHECK(destination->IsFPStackSlot());
        // No 64-bit immediate store exists; go through the scratch register.
        __ movq(kScratchRegister, src_const);
        __ movq(g.ToOperand(destination), kScratchRegister);
      }
    }
  } else if (source->IsFPRegister()) {
    XMMRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ Movapd(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      Operand dst = g.ToOperand(destination);
      __ Movsd(dst, src);
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsFPRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ Movsd(dst, src);
    } else {
      // We rely on having xmm0 available as a fixed scratch register.
      Operand dst = g.ToOperand(destination);
      __ Movsd(xmm0, src);
      __ Movsd(dst, xmm0);
    }
  } else {
    UNREACHABLE();
  }
}
2236
2237
// Emits code that exchanges the contents of `source` and `destination`.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = g.ToRegister(source);
    Register dst = g.ToRegister(destination);
    __ movq(kScratchRegister, src);
    __ movq(src, dst);
    __ movq(dst, kScratchRegister);
  } else if (source->IsRegister() && destination->IsStackSlot()) {
    // Register-memory: park the register value on the stack, copy the slot
    // into the register, then pop the parked value into the slot.
    Register src = g.ToRegister(source);
    __ pushq(src);
    frame_access_state()->IncreaseSPDelta(1);
    Operand dst = g.ToOperand(destination);
    __ movq(src, dst);
    frame_access_state()->IncreaseSPDelta(-1);
    // Re-evaluate the slot operand after the tracked SP delta changed, since
    // slot operands may be computed relative to rsp.
    dst = g.ToOperand(destination);
    __ popq(dst);
  } else if ((source->IsStackSlot() && destination->IsStackSlot()) ||
             (source->IsFPStackSlot() && destination->IsFPStackSlot())) {
    // Memory-memory.
    Register tmp = kScratchRegister;
    Operand src = g.ToOperand(source);
    Operand dst = g.ToOperand(destination);
    __ movq(tmp, dst);
    __ pushq(src);
    frame_access_state()->IncreaseSPDelta(1);
    // Re-evaluate the source operand: the push above changed the SP delta.
    src = g.ToOperand(source);
    __ movq(src, tmp);
    frame_access_state()->IncreaseSPDelta(-1);
    dst = g.ToOperand(destination);
    __ popq(dst);
  } else if (source->IsFPRegister() && destination->IsFPRegister()) {
    // XMM register-register swap. We rely on having xmm0
    // available as a fixed scratch register.
    XMMRegister src = g.ToDoubleRegister(source);
    XMMRegister dst = g.ToDoubleRegister(destination);
    __ Movapd(xmm0, src);
    __ Movapd(src, dst);
    __ Movapd(dst, xmm0);
  } else if (source->IsFPRegister() && destination->IsFPStackSlot()) {
    // XMM register-memory swap. We rely on having xmm0
    // available as a fixed scratch register.
    XMMRegister src = g.ToDoubleRegister(source);
    Operand dst = g.ToOperand(destination);
    __ Movsd(xmm0, src);
    __ Movsd(src, dst);
    __ Movsd(dst, xmm0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
2294
2295
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002296void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2297 for (size_t index = 0; index < target_count; ++index) {
2298 __ dq(targets[index]);
2299 }
2300}
2301
2302
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002303void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002304 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2305 return;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002306 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002307
2308 int space_needed = Deoptimizer::patch_size();
2309 // Ensure that we have enough space after the previous lazy-bailout
2310 // instruction for patching the code here.
2311 int current_pc = masm()->pc_offset();
2312 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2313 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2314 __ Nop(padding_size);
2315 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002316}
2317
2318#undef __
2319
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002320} // namespace compiler
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002321} // namespace internal
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002322} // namespace v8