// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/x64/assembler-x64.h"
#include "src/x64/macro-assembler-x64.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->

// Adds X64 specific methods for decoding operands.
class X64OperandConverter : public InstructionOperandConverter {
 public:
  X64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  Immediate InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index, int extra = 0) {
    return ToOperand(instr_->InputAt(index), extra);
  }

  Operand OutputOperand() { return ToOperand(instr_->Output()); }

  Immediate ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    if (constant.type() == Constant::kFloat64) {
      DCHECK_EQ(0, bit_cast<int64_t>(constant.ToFloat64()));
      return Immediate(0);
    }
    if (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
        constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE ||
        constant.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
      return Immediate(constant.ToInt32(), constant.rmode());
    }
    return Immediate(constant.ToInt32());
  }

  Operand ToOperand(InstructionOperand* op, int extra = 0) {
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
  }

  Operand SlotToOperand(int slot_index, int extra = 0) {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot_index);
    return Operand(offset.from_stack_pointer() ? rsp : rbp,
                   offset.offset() + extra);
  }

  static size_t NextOffset(size_t* offset) {
    size_t i = *offset;
    (*offset)++;
    return i;
  }

  static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
    STATIC_ASSERT(0 == static_cast<int>(times_1));
    STATIC_ASSERT(1 == static_cast<int>(times_2));
    STATIC_ASSERT(2 == static_cast<int>(times_4));
    STATIC_ASSERT(3 == static_cast<int>(times_8));
    int scale = static_cast<int>(mode - one);
    DCHECK(scale >= 0 && scale < 4);
    return static_cast<ScaleFactor>(scale);
  }

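  // Note: the decoder below turns the instruction's remaining inputs into an
  // x64 memory Operand, consuming them in order starting at *offset. The
  // addressing mode encoded in the opcode determines which inputs are
  // present: a base register, an optional index register with a scale factor,
  // and an optional 32-bit displacement. As an illustrative example (register
  // names chosen arbitrarily), kMode_MR4I with inputs [rax, rbx, 16] decodes
  // to Operand(rax, rbx, times_4, 16).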
  Operand MemoryOperand(size_t* offset) {
    AddressingMode mode = AddressingModeField::decode(instr_->opcode());
    switch (mode) {
      case kMode_MR: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_MRI: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, disp);
      }
      case kMode_MR1:
      case kMode_MR2:
      case kMode_MR4:
      case kMode_MR8: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1, mode);
        int32_t disp = 0;
        return Operand(base, index, scale, disp);
      }
      case kMode_MR1I:
      case kMode_MR2I:
      case kMode_MR4I:
      case kMode_MR8I: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, index, scale, disp);
      }
      case kMode_M1: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_M2:
        UNREACHABLE();  // Should use kMode_MR with a more compact encoding instead.
        return Operand(no_reg, 0);
      case kMode_M4:
      case kMode_M8: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1, mode);
        int32_t disp = 0;
        return Operand(index, scale, disp);
      }
      case kMode_M1I:
      case kMode_M2I:
      case kMode_M4I:
      case kMode_M8I: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(index, scale, disp);
      }
      case kMode_None:
        UNREACHABLE();
        return Operand(no_reg, 0);
    }
    UNREACHABLE();
    return Operand(no_reg, 0);
  }

  Operand MemoryOperand(size_t first_input = 0) {
    return MemoryOperand(&first_input);
  }
};


namespace {

bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}


class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ xorl(result_, result_); }

 private:
  Register const result_;
};


class OutOfLineLoadNaN final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN(CodeGenerator* gen, XMMRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ Pcmpeqd(result_, result_); }

 private:
  XMMRegister const result_;
};


class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
 public:
  OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
                             XMMRegister input)
      : OutOfLineCode(gen), result_(result), input_(input) {}

  void Generate() final {
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(MemOperand(rsp, 0), input_);
    __ SlowTruncateToI(result_, rsp, 0);
    __ addp(rsp, Immediate(kDoubleSize));
  }

 private:
  Register const result_;
  XMMRegister const input_;
};


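// Out-of-line slow path for kArchStoreWithWriteBarrier below. The inline fast
// path performs the store and only branches here when the object's page flags
// say its outgoing pointers are interesting; this stub then also bails out
// early for Smi values (when the mode allows it) and for values on pages whose
// incoming pointers are not interesting, before computing the slot address and
// calling RecordWriteStub.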
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        operand_(operand),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, zero,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ leap(scratch1_, operand_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Operand const operand_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

}  // namespace


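// The ASSEMBLE_* macros below factor out the operand-selection boilerplate
// shared by whole families of instructions: unary and binary ALU operations,
// compares, multiplies, shifts, zero-/sign-extending moves, and scalar
// SSE/AVX arithmetic. Each macro emits the register, memory, or immediate
// form of the given assembler instruction depending on how the instruction's
// inputs and output were allocated.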
#define ASSEMBLE_UNOP(asm_instr) \
  do { \
    if (instr->Output()->IsRegister()) { \
      __ asm_instr(i.OutputRegister()); \
    } else { \
      __ asm_instr(i.OutputOperand()); \
    } \
  } while (0)


#define ASSEMBLE_BINOP(asm_instr) \
  do { \
    if (HasImmediateInput(instr, 1)) { \
      if (instr->InputAt(0)->IsRegister()) { \
        __ asm_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else { \
        __ asm_instr(i.InputOperand(0), i.InputImmediate(1)); \
      } \
    } else { \
      if (instr->InputAt(1)->IsRegister()) { \
        __ asm_instr(i.InputRegister(0), i.InputRegister(1)); \
      } else { \
        __ asm_instr(i.InputRegister(0), i.InputOperand(1)); \
      } \
    } \
  } while (0)

#define ASSEMBLE_COMPARE(asm_instr) \
  do { \
    if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
      size_t index = 0; \
      Operand left = i.MemoryOperand(&index); \
      if (HasImmediateInput(instr, index)) { \
        __ asm_instr(left, i.InputImmediate(index)); \
      } else { \
        __ asm_instr(left, i.InputRegister(index)); \
      } \
    } else { \
      if (HasImmediateInput(instr, 1)) { \
        if (instr->InputAt(0)->IsRegister()) { \
          __ asm_instr(i.InputRegister(0), i.InputImmediate(1)); \
        } else { \
          __ asm_instr(i.InputOperand(0), i.InputImmediate(1)); \
        } \
      } else { \
        if (instr->InputAt(1)->IsRegister()) { \
          __ asm_instr(i.InputRegister(0), i.InputRegister(1)); \
        } else { \
          __ asm_instr(i.InputRegister(0), i.InputOperand(1)); \
        } \
      } \
    } \
  } while (0)

#define ASSEMBLE_MULT(asm_instr) \
  do { \
    if (HasImmediateInput(instr, 1)) { \
      if (instr->InputAt(0)->IsRegister()) { \
        __ asm_instr(i.OutputRegister(), i.InputRegister(0), \
                     i.InputImmediate(1)); \
      } else { \
        __ asm_instr(i.OutputRegister(), i.InputOperand(0), \
                     i.InputImmediate(1)); \
      } \
    } else { \
      if (instr->InputAt(1)->IsRegister()) { \
        __ asm_instr(i.OutputRegister(), i.InputRegister(1)); \
      } else { \
        __ asm_instr(i.OutputRegister(), i.InputOperand(1)); \
      } \
    } \
  } while (0)


#define ASSEMBLE_SHIFT(asm_instr, width) \
  do { \
    if (HasImmediateInput(instr, 1)) { \
      if (instr->Output()->IsRegister()) { \
        __ asm_instr(i.OutputRegister(), Immediate(i.InputInt##width(1))); \
      } else { \
        __ asm_instr(i.OutputOperand(), Immediate(i.InputInt##width(1))); \
      } \
    } else { \
      if (instr->Output()->IsRegister()) { \
        __ asm_instr##_cl(i.OutputRegister()); \
      } else { \
        __ asm_instr##_cl(i.OutputOperand()); \
      } \
    } \
  } while (0)


#define ASSEMBLE_MOVX(asm_instr) \
  do { \
    if (instr->addressing_mode() != kMode_None) { \
      __ asm_instr(i.OutputRegister(), i.MemoryOperand()); \
    } else if (instr->InputAt(0)->IsRegister()) { \
      __ asm_instr(i.OutputRegister(), i.InputRegister(0)); \
    } else { \
      __ asm_instr(i.OutputRegister(), i.InputOperand(0)); \
    } \
  } while (0)

#define ASSEMBLE_SSE_BINOP(asm_instr) \
  do { \
    if (instr->InputAt(1)->IsFPRegister()) { \
      __ asm_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    } else { \
      __ asm_instr(i.InputDoubleRegister(0), i.InputOperand(1)); \
    } \
  } while (0)

#define ASSEMBLE_SSE_UNOP(asm_instr) \
  do { \
    if (instr->InputAt(0)->IsFPRegister()) { \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \
    } else { \
      __ asm_instr(i.OutputDoubleRegister(), i.InputOperand(0)); \
    } \
  } while (0)

#define ASSEMBLE_AVX_BINOP(asm_instr) \
  do { \
    CpuFeatureScope avx_scope(masm(), AVX); \
    if (instr->InputAt(1)->IsFPRegister()) { \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputDoubleRegister(1)); \
    } else { \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputOperand(1)); \
    } \
  } while (0)

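// The ASSEMBLE_CHECKED_* macros below emit bounds-checked memory accesses.
// The effective index is index1 + index2 and is compared against a length
// that is either a register or a constant (possibly carrying a relocation
// mode, e.g. for wasm memory sizes). A load that fails the check yields zero
// or NaN via an out-of-line stub instead of performing the access; a store
// that fails the check is simply skipped.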
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr) \
  do { \
    auto result = i.OutputDoubleRegister(); \
    auto buffer = i.InputRegister(0); \
    auto index1 = i.InputRegister(1); \
    auto index2 = i.InputUint32(2); \
    OutOfLineCode* ool; \
    if (instr->InputAt(3)->IsRegister()) { \
      auto length = i.InputRegister(3); \
      DCHECK_EQ(0, index2); \
      __ cmpl(index1, length); \
      ool = new (zone()) OutOfLineLoadNaN(this, result); \
    } else { \
      auto length = i.InputUint32(3); \
      RelocInfo::Mode rmode = i.ToConstant(instr->InputAt(3)).rmode(); \
      DCHECK_LE(index2, length); \
      __ cmpl(index1, Immediate(length - index2, rmode)); \
      class OutOfLineLoadFloat final : public OutOfLineCode { \
       public: \
        OutOfLineLoadFloat(CodeGenerator* gen, XMMRegister result, \
                           Register buffer, Register index1, int32_t index2, \
                           int32_t length, RelocInfo::Mode rmode) \
            : OutOfLineCode(gen), \
              result_(result), \
              buffer_(buffer), \
              index1_(index1), \
              index2_(index2), \
              length_(length), \
              rmode_(rmode) {} \
 \
        void Generate() final { \
          __ leal(kScratchRegister, Operand(index1_, index2_)); \
          __ Pcmpeqd(result_, result_); \
          __ cmpl(kScratchRegister, Immediate(length_, rmode_)); \
          __ j(above_equal, exit()); \
          __ asm_instr(result_, \
                       Operand(buffer_, kScratchRegister, times_1, 0)); \
        } \
 \
       private: \
        XMMRegister const result_; \
        Register const buffer_; \
        Register const index1_; \
        int32_t const index2_; \
        int32_t const length_; \
        RelocInfo::Mode rmode_; \
      }; \
      ool = new (zone()) OutOfLineLoadFloat(this, result, buffer, index1, \
                                            index2, length, rmode); \
    } \
    __ j(above_equal, ool->entry()); \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2)); \
    __ bind(ool->exit()); \
  } while (false)

#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr) \
  do { \
    auto result = i.OutputRegister(); \
    auto buffer = i.InputRegister(0); \
    auto index1 = i.InputRegister(1); \
    auto index2 = i.InputUint32(2); \
    OutOfLineCode* ool; \
    if (instr->InputAt(3)->IsRegister()) { \
      auto length = i.InputRegister(3); \
      DCHECK_EQ(0, index2); \
      __ cmpl(index1, length); \
      ool = new (zone()) OutOfLineLoadZero(this, result); \
    } else { \
      auto length = i.InputUint32(3); \
      RelocInfo::Mode rmode = i.ToConstant(instr->InputAt(3)).rmode(); \
      DCHECK_LE(index2, length); \
      __ cmpl(index1, Immediate(length - index2, rmode)); \
      class OutOfLineLoadInteger final : public OutOfLineCode { \
       public: \
        OutOfLineLoadInteger(CodeGenerator* gen, Register result, \
                             Register buffer, Register index1, int32_t index2, \
                             int32_t length, RelocInfo::Mode rmode) \
            : OutOfLineCode(gen), \
              result_(result), \
              buffer_(buffer), \
              index1_(index1), \
              index2_(index2), \
              length_(length), \
              rmode_(rmode) {} \
 \
        void Generate() final { \
          Label oob; \
          __ leal(kScratchRegister, Operand(index1_, index2_)); \
          __ cmpl(kScratchRegister, Immediate(length_, rmode_)); \
          __ j(above_equal, &oob, Label::kNear); \
          __ asm_instr(result_, \
                       Operand(buffer_, kScratchRegister, times_1, 0)); \
          __ jmp(exit()); \
          __ bind(&oob); \
          __ xorl(result_, result_); \
        } \
 \
       private: \
        Register const result_; \
        Register const buffer_; \
        Register const index1_; \
        int32_t const index2_; \
        int32_t const length_; \
        RelocInfo::Mode const rmode_; \
      }; \
      ool = new (zone()) OutOfLineLoadInteger(this, result, buffer, index1, \
                                              index2, length, rmode); \
    } \
    __ j(above_equal, ool->entry()); \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2)); \
    __ bind(ool->exit()); \
  } while (false)

#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr) \
  do { \
    auto buffer = i.InputRegister(0); \
    auto index1 = i.InputRegister(1); \
    auto index2 = i.InputUint32(2); \
    auto value = i.InputDoubleRegister(4); \
    if (instr->InputAt(3)->IsRegister()) { \
      auto length = i.InputRegister(3); \
      DCHECK_EQ(0, index2); \
      Label done; \
      __ cmpl(index1, length); \
      __ j(above_equal, &done, Label::kNear); \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value); \
      __ bind(&done); \
    } else { \
      auto length = i.InputUint32(3); \
      RelocInfo::Mode rmode = i.ToConstant(instr->InputAt(3)).rmode(); \
      DCHECK_LE(index2, length); \
      __ cmpl(index1, Immediate(length - index2, rmode)); \
      class OutOfLineStoreFloat final : public OutOfLineCode { \
       public: \
        OutOfLineStoreFloat(CodeGenerator* gen, Register buffer, \
                            Register index1, int32_t index2, int32_t length, \
                            XMMRegister value, RelocInfo::Mode rmode) \
            : OutOfLineCode(gen), \
              buffer_(buffer), \
              index1_(index1), \
              index2_(index2), \
              length_(length), \
              value_(value), \
              rmode_(rmode) {} \
 \
        void Generate() final { \
          __ leal(kScratchRegister, Operand(index1_, index2_)); \
          __ cmpl(kScratchRegister, Immediate(length_, rmode_)); \
          __ j(above_equal, exit()); \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0), \
                       value_); \
        } \
 \
       private: \
        Register const buffer_; \
        Register const index1_; \
        int32_t const index2_; \
        int32_t const length_; \
        XMMRegister const value_; \
        RelocInfo::Mode rmode_; \
      }; \
      auto ool = new (zone()) OutOfLineStoreFloat( \
          this, buffer, index1, index2, length, value, rmode); \
      __ j(above_equal, ool->entry()); \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value); \
      __ bind(ool->exit()); \
    } \
  } while (false)

#define ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Value) \
  do { \
    auto buffer = i.InputRegister(0); \
    auto index1 = i.InputRegister(1); \
    auto index2 = i.InputUint32(2); \
    if (instr->InputAt(3)->IsRegister()) { \
      auto length = i.InputRegister(3); \
      DCHECK_EQ(0, index2); \
      Label done; \
      __ cmpl(index1, length); \
      __ j(above_equal, &done, Label::kNear); \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value); \
      __ bind(&done); \
    } else { \
      auto length = i.InputUint32(3); \
      RelocInfo::Mode rmode = i.ToConstant(instr->InputAt(3)).rmode(); \
      DCHECK_LE(index2, length); \
      __ cmpl(index1, Immediate(length - index2, rmode)); \
      class OutOfLineStoreInteger final : public OutOfLineCode { \
       public: \
        OutOfLineStoreInteger(CodeGenerator* gen, Register buffer, \
                              Register index1, int32_t index2, int32_t length, \
                              Value value, RelocInfo::Mode rmode) \
            : OutOfLineCode(gen), \
              buffer_(buffer), \
              index1_(index1), \
              index2_(index2), \
              length_(length), \
              value_(value), \
              rmode_(rmode) {} \
 \
        void Generate() final { \
          __ leal(kScratchRegister, Operand(index1_, index2_)); \
          __ cmpl(kScratchRegister, Immediate(length_, rmode_)); \
          __ j(above_equal, exit()); \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0), \
                       value_); \
        } \
 \
       private: \
        Register const buffer_; \
        Register const index1_; \
        int32_t const index2_; \
        int32_t const length_; \
        Value const value_; \
        RelocInfo::Mode rmode_; \
      }; \
      auto ool = new (zone()) OutOfLineStoreInteger( \
          this, buffer, index1, index2, length, value, rmode); \
      __ j(above_equal, ool->entry()); \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value); \
      __ bind(ool->exit()); \
    } \
  } while (false)

#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \
  do { \
    if (instr->InputAt(4)->IsRegister()) { \
      Register value = i.InputRegister(4); \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Register); \
    } else { \
      Immediate value = i.InputImmediate(4); \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Immediate); \
    } \
  } while (false)

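// The IEEE754 macros route Float64 math operators to the ieee754 C fallback
// routines via CallCFunction. PrepareCallCFunction only sets up stack
// alignment for the given argument count; the floating-point argument and
// result registers are presumably fixed by the instruction selector according
// to the native calling convention, so no register moves are emitted here.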
#define ASSEMBLE_IEEE754_BINOP(name) \
  do { \
    __ PrepareCallCFunction(2); \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     2); \
  } while (false)

#define ASSEMBLE_IEEE754_UNOP(name) \
  do { \
    __ PrepareCallCFunction(1); \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     1); \
  } while (false)

void CodeGenerator::AssembleDeconstructFrame() {
  __ movq(rsp, rbp);
  __ popq(rbp);
}

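// Stack adjustments around tail calls: AssembleDeconstructActivationRecord
// releases stack slots when the computed slot delta is positive, while
// AssemblePrepareTailCall below reserves additional slots when it is
// negative, so that the stack pointer matches what the tail callee expects.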
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ addq(rsp, Immediate(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ subq(rsp, Immediate(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ movq(rbp, MemOperand(rbp, 0));
  }
  frame_access_state()->SetFrameAccessToSP();
}

void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ Cmp(Operand(rbp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &done, Label::kNear);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ SmiToInteger32(
      caller_args_count_reg,
      Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack);
  __ bind(&done);
}

// Assembles an instruction after register allocation, producing machine code.
CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    Instruction* instr) {
  X64OperandConverter i(this, instr);
  InstructionCode opcode = instr->opcode();
  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
  switch (arch_opcode) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ Call(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ call(reg);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ jmp(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ jmp(reg);
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallAddress: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      CHECK(!HasImmediateInput(instr, 0));
      Register reg = i.InputRegister(0);
      __ jmp(reg);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      __ Call(FieldOperand(func, JSFunction::kCodeEntryOffset));
      frame_access_state()->ClearSPDelta();
      RecordCallPosition(instr);
      break;
    }
    case kArchTailCallJSFunctionFromJSFunction:
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters);
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (HasImmediateInput(instr, 0)) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchComment: {
      Address comment_string = i.InputExternalReference(0).address();
      __ RecordComment(reinterpret_cast<const char*>(comment_string));
      break;
    }
    case kArchDebugBreak:
      __ int3();
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      CodeGenResult result =
          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      if (result != kSuccess) return result;
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ movq(i.OutputRegister(), rsp);
      break;
    case kArchFramePointer:
      __ movq(i.OutputRegister(), rbp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ movq(i.OutputRegister(), Operand(rbp, 0));
      } else {
        __ movq(i.OutputRegister(), rbp);
      }
      break;
    case kArchTruncateDoubleToI: {
      auto result = i.OutputRegister();
      auto input = i.InputDoubleRegister(0);
      auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input);
      // We use Cvttsd2siq instead of Cvttsd2si for performance reasons. The
      // use of Cvttsd2siq requires the movl below to avoid sign extension.
      __ Cvttsd2siq(result, input);
      __ cmpq(result, Immediate(1));
      __ j(overflow, ool->entry());
      __ bind(ool->exit());
      __ movl(result, result);
      break;
    }
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      Register value = i.InputRegister(index);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
                                                   scratch0, scratch1, mode);
      __ movp(operand, value);
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask,
                       not_zero, ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      Register base;
      if (offset.from_stack_pointer()) {
        base = rsp;
      } else {
        base = rbp;
      }
      __ leaq(i.OutputRegister(), Operand(base, offset.offset()));
      break;
    }
    case kIeee754Float64Atan:
      ASSEMBLE_IEEE754_UNOP(atan);
      break;
    case kIeee754Float64Atan2:
      ASSEMBLE_IEEE754_BINOP(atan2);
      break;
    case kIeee754Float64Atanh:
      ASSEMBLE_IEEE754_UNOP(atanh);
      break;
    case kIeee754Float64Cbrt:
      ASSEMBLE_IEEE754_UNOP(cbrt);
      break;
    case kIeee754Float64Cos:
      ASSEMBLE_IEEE754_UNOP(cos);
      break;
    case kIeee754Float64Exp:
      ASSEMBLE_IEEE754_UNOP(exp);
      break;
    case kIeee754Float64Expm1:
      ASSEMBLE_IEEE754_UNOP(expm1);
      break;
    case kIeee754Float64Log:
      ASSEMBLE_IEEE754_UNOP(log);
      break;
    case kIeee754Float64Log1p:
      ASSEMBLE_IEEE754_UNOP(log1p);
      break;
    case kIeee754Float64Log2:
      ASSEMBLE_IEEE754_UNOP(log2);
      break;
    case kIeee754Float64Log10:
      ASSEMBLE_IEEE754_UNOP(log10);
      break;
    case kIeee754Float64Sin:
      ASSEMBLE_IEEE754_UNOP(sin);
      break;
    case kIeee754Float64Tan:
      ASSEMBLE_IEEE754_UNOP(tan);
      break;
    case kX64Add32:
      ASSEMBLE_BINOP(addl);
      break;
    case kX64Add:
      ASSEMBLE_BINOP(addq);
      break;
    case kX64Sub32:
      ASSEMBLE_BINOP(subl);
      break;
    case kX64Sub:
      ASSEMBLE_BINOP(subq);
      break;
    case kX64And32:
      ASSEMBLE_BINOP(andl);
      break;
    case kX64And:
      ASSEMBLE_BINOP(andq);
      break;
    case kX64Cmp8:
      ASSEMBLE_COMPARE(cmpb);
      break;
    case kX64Cmp16:
      ASSEMBLE_COMPARE(cmpw);
      break;
    case kX64Cmp32:
      ASSEMBLE_COMPARE(cmpl);
      break;
    case kX64Cmp:
      ASSEMBLE_COMPARE(cmpq);
      break;
    case kX64Test8:
      ASSEMBLE_COMPARE(testb);
      break;
    case kX64Test16:
      ASSEMBLE_COMPARE(testw);
      break;
    case kX64Test32:
      ASSEMBLE_COMPARE(testl);
      break;
    case kX64Test:
      ASSEMBLE_COMPARE(testq);
      break;
    case kX64Imul32:
      ASSEMBLE_MULT(imull);
      break;
    case kX64Imul:
      ASSEMBLE_MULT(imulq);
      break;
    case kX64ImulHigh32:
      if (instr->InputAt(1)->IsRegister()) {
        __ imull(i.InputRegister(1));
      } else {
        __ imull(i.InputOperand(1));
      }
      break;
    case kX64UmulHigh32:
      if (instr->InputAt(1)->IsRegister()) {
        __ mull(i.InputRegister(1));
      } else {
        __ mull(i.InputOperand(1));
      }
      break;
    case kX64Idiv32:
      __ cdq();
      __ idivl(i.InputRegister(1));
      break;
    case kX64Idiv:
      __ cqo();
      __ idivq(i.InputRegister(1));
      break;
    case kX64Udiv32:
      __ xorl(rdx, rdx);
      __ divl(i.InputRegister(1));
      break;
    case kX64Udiv:
      __ xorq(rdx, rdx);
      __ divq(i.InputRegister(1));
      break;
    case kX64Not:
      ASSEMBLE_UNOP(notq);
      break;
    case kX64Not32:
      ASSEMBLE_UNOP(notl);
      break;
    case kX64Neg:
      ASSEMBLE_UNOP(negq);
      break;
    case kX64Neg32:
      ASSEMBLE_UNOP(negl);
      break;
    case kX64Or32:
      ASSEMBLE_BINOP(orl);
      break;
    case kX64Or:
      ASSEMBLE_BINOP(orq);
      break;
    case kX64Xor32:
      ASSEMBLE_BINOP(xorl);
      break;
    case kX64Xor:
      ASSEMBLE_BINOP(xorq);
      break;
    case kX64Shl32:
      ASSEMBLE_SHIFT(shll, 5);
      break;
    case kX64Shl:
      ASSEMBLE_SHIFT(shlq, 6);
      break;
    case kX64Shr32:
      ASSEMBLE_SHIFT(shrl, 5);
      break;
    case kX64Shr:
      ASSEMBLE_SHIFT(shrq, 6);
      break;
    case kX64Sar32:
      ASSEMBLE_SHIFT(sarl, 5);
      break;
    case kX64Sar:
      ASSEMBLE_SHIFT(sarq, 6);
      break;
    case kX64Ror32:
      ASSEMBLE_SHIFT(rorl, 5);
      break;
    case kX64Ror:
      ASSEMBLE_SHIFT(rorq, 6);
      break;
    case kX64Lzcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Lzcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Lzcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Lzcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Lzcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Lzcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Tzcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Tzcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Tzcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Tzcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Tzcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Tzcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Popcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Popcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Popcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Popcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Popcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Popcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat32Cmp:
      ASSEMBLE_SSE_BINOP(Ucomiss);
      break;
    case kSSEFloat32Add:
      ASSEMBLE_SSE_BINOP(addss);
      break;
    case kSSEFloat32Sub:
      ASSEMBLE_SSE_BINOP(subss);
      break;
    case kSSEFloat32Mul:
      ASSEMBLE_SSE_BINOP(mulss);
      break;
    case kSSEFloat32Div:
      ASSEMBLE_SSE_BINOP(divss);
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulss depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat32Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 33);
      __ andps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 31);
      __ xorps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Sqrt:
      ASSEMBLE_SSE_UNOP(sqrtss);
      break;
    case kSSEFloat32Max:
      ASSEMBLE_SSE_BINOP(maxss);
      break;
    case kSSEFloat32Min:
      ASSEMBLE_SSE_BINOP(minss);
      break;
    case kSSEFloat32ToFloat64:
      ASSEMBLE_SSE_UNOP(Cvtss2sd);
      break;
    case kSSEFloat32Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ Roundss(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat32ToInt32:
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttss2si(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2si(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat32ToUint32: {
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    }
    case kSSEFloat64Cmp:
      ASSEMBLE_SSE_BINOP(Ucomisd);
      break;
    case kSSEFloat64Add:
      ASSEMBLE_SSE_BINOP(addsd);
      break;
    case kSSEFloat64Sub:
      ASSEMBLE_SSE_BINOP(subsd);
      break;
    case kSSEFloat64Mul:
      ASSEMBLE_SSE_BINOP(mulsd);
      break;
    case kSSEFloat64Div:
      ASSEMBLE_SSE_BINOP(divsd);
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulsd depending on the result.
      __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat64Mod: {
      __ subq(rsp, Immediate(kDoubleSize));
      // Move values to st(0) and st(1).
      __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(1));
      __ fld_d(Operand(rsp, 0));
      __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
      __ fld_d(Operand(rsp, 0));
      // Loop while fprem isn't done.
      Label mod_loop;
      __ bind(&mod_loop);
      // This instruction traps on all kinds of inputs, but we are assuming
      // the floating point control word is set to ignore them all.
      __ fprem();
      // The following 2 instructions implicitly use rax.
      __ fnstsw_ax();
      if (CpuFeatures::IsSupported(SAHF)) {
        CpuFeatureScope sahf_scope(masm(), SAHF);
        __ sahf();
      } else {
        __ shrl(rax, Immediate(8));
        __ andl(rax, Immediate(0xFF));
        __ pushq(rax);
        __ popfq();
      }
      __ j(parity_even, &mod_loop);
      // Move output to stack and clean up.
      __ fstp(1);
      __ fstp_d(Operand(rsp, 0));
      __ Movsd(i.OutputDoubleRegister(), Operand(rsp, 0));
      __ addq(rsp, Immediate(kDoubleSize));
      break;
    }
    case kSSEFloat64Max:
      ASSEMBLE_SSE_BINOP(maxsd);
      break;
    case kSSEFloat64Min:
      ASSEMBLE_SSE_BINOP(minsd);
      break;
    case kSSEFloat64Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 1);
      __ andpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 63);
      __ xorpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Sqrt:
      ASSEMBLE_SSE_UNOP(sqrtsd);
      break;
    case kSSEFloat64Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ Roundsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat64ToFloat32:
      ASSEMBLE_SSE_UNOP(Cvtsd2ss);
      break;
    case kSSEFloat64ToInt32:
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttsd2si(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2si(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat64ToUint32: {
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
      }
      if (MiscField::decode(instr->opcode())) {
        __ AssertZeroExtended(i.OutputRegister());
      }
      break;
    }
    case kSSEFloat32ToInt64:
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
        Label done;
        Label fail;
        __ Move(kScratchDoubleReg, static_cast<float>(INT64_MIN));
        if (instr->InputAt(0)->IsFPRegister()) {
          __ Ucomiss(kScratchDoubleReg, i.InputDoubleRegister(0));
        } else {
          __ Ucomiss(kScratchDoubleReg, i.InputOperand(0));
        }
        // If the input is NaN, then the conversion fails.
        __ j(parity_even, &fail);
        // If the input is INT64_MIN, then the conversion succeeds.
        __ j(equal, &done);
        __ cmpq(i.OutputRegister(0), Immediate(1));
        // If the conversion results in INT64_MIN, but the input was not
        // INT64_MIN, then the conversion fails.
        __ j(no_overflow, &done);
        __ bind(&fail);
        __ Set(i.OutputRegister(1), 0);
        __ bind(&done);
      }
      break;
    case kSSEFloat64ToInt64:
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttsd2siq(i.OutputRegister(0), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(0), i.InputOperand(0));
      }
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
        Label done;
        Label fail;
        __ Move(kScratchDoubleReg, static_cast<double>(INT64_MIN));
        if (instr->InputAt(0)->IsFPRegister()) {
          __ Ucomisd(kScratchDoubleReg, i.InputDoubleRegister(0));
        } else {
          __ Ucomisd(kScratchDoubleReg, i.InputOperand(0));
        }
        // If the input is NaN, then the conversion fails.
        __ j(parity_even, &fail);
        // If the input is INT64_MIN, then the conversion succeeds.
        __ j(equal, &done);
        __ cmpq(i.OutputRegister(0), Immediate(1));
        // If the conversion results in INT64_MIN, but the input was not
        // INT64_MIN, then the conversion fails.
        __ j(no_overflow, &done);
        __ bind(&fail);
        __ Set(i.OutputRegister(1), 0);
        __ bind(&done);
      }
      break;
    case kSSEFloat32ToUint64: {
      Label done;
      Label success;
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 0);
      }
      // There does not exist a Float32ToUint64 instruction, so we have to use
      // the Float32ToInt64 instruction.
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      // Check if the result of the Float32ToInt64 conversion is positive, we
      // are already done.
      __ testq(i.OutputRegister(), i.OutputRegister());
      __ j(positive, &success);
      // The result of the first conversion was negative, which means that the
      // input value was not within the positive int64 range. We subtract 2^63
      // and convert it again to see if it is within the uint64 range.
      __ Move(kScratchDoubleReg, -9223372036854775808.0f);
      if (instr->InputAt(0)->IsFPRegister()) {
        __ addss(kScratchDoubleReg, i.InputDoubleRegister(0));
      } else {
        __ addss(kScratchDoubleReg, i.InputOperand(0));
      }
      __ Cvttss2siq(i.OutputRegister(), kScratchDoubleReg);
      __ testq(i.OutputRegister(), i.OutputRegister());
      // The only possible negative value here is 0x8000000000000000, which is
      // used on x64 to indicate an integer overflow.
      __ j(negative, &done);
      // The input value is within uint64 range and the second conversion worked
      // successfully, but we still have to undo the subtraction we did
      // earlier.
      __ Set(kScratchRegister, 0x8000000000000000);
      __ orq(i.OutputRegister(), kScratchRegister);
      __ bind(&success);
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
      }
      __ bind(&done);
      break;
    }
    case kSSEFloat64ToUint64: {
      Label done;
      Label success;
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 0);
      }
      // There does not exist a Float64ToUint64 instruction, so we have to use
      // the Float64ToInt64 instruction.
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
      }
      // Check if the result of the Float64ToInt64 conversion is positive, we
      // are already done.
      __ testq(i.OutputRegister(), i.OutputRegister());
      __ j(positive, &success);
1362 // The result of the first conversion was negative, which means that the
1363 // input value was not within the positive int64 range. We subtract 2^64
1364 // and convert it again to see if it is within the uint64 range.
1365 __ Move(kScratchDoubleReg, -9223372036854775808.0);
Ben Murdochc5610432016-08-08 18:44:38 +01001366 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001367 __ addsd(kScratchDoubleReg, i.InputDoubleRegister(0));
1368 } else {
1369 __ addsd(kScratchDoubleReg, i.InputOperand(0));
1370 }
1371 __ Cvttsd2siq(i.OutputRegister(), kScratchDoubleReg);
1372 __ testq(i.OutputRegister(), i.OutputRegister());
1373      // The only possible negative value here is 0x8000000000000000, which is
1374 // used on x64 to indicate an integer overflow.
1375 __ j(negative, &done);
1376      // The input value is within the uint64 range and the second
1377      // conversion succeeded, but we still have to undo the subtraction we
1378      // did earlier.
1379 __ Set(kScratchRegister, 0x8000000000000000);
1380 __ orq(i.OutputRegister(), kScratchRegister);
1381 __ bind(&success);
1382 if (instr->OutputCount() > 1) {
1383 __ Set(i.OutputRegister(1), 1);
1384 }
1385 __ bind(&done);
1386 break;
1387 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001388 case kSSEInt32ToFloat64:
1389 if (instr->InputAt(0)->IsRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001390 __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001392 __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001393 }
1394 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001395 case kSSEInt32ToFloat32:
1396 if (instr->InputAt(0)->IsRegister()) {
1397 __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
1398 } else {
1399 __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
1400 }
1401 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001402 case kSSEInt64ToFloat32:
1403 if (instr->InputAt(0)->IsRegister()) {
1404 __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
1405 } else {
1406 __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
1407 }
1408 break;
1409 case kSSEInt64ToFloat64:
1410 if (instr->InputAt(0)->IsRegister()) {
1411 __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
1412 } else {
1413 __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
1414 }
1415 break;
1416 case kSSEUint64ToFloat32:
1417 if (instr->InputAt(0)->IsRegister()) {
1418 __ movq(kScratchRegister, i.InputRegister(0));
1419 } else {
1420 __ movq(kScratchRegister, i.InputOperand(0));
1421 }
1422 __ Cvtqui2ss(i.OutputDoubleRegister(), kScratchRegister,
1423 i.TempRegister(0));
1424 break;
1425 case kSSEUint64ToFloat64:
1426 if (instr->InputAt(0)->IsRegister()) {
1427 __ movq(kScratchRegister, i.InputRegister(0));
1428 } else {
1429 __ movq(kScratchRegister, i.InputOperand(0));
1430 }
1431 __ Cvtqui2sd(i.OutputDoubleRegister(), kScratchRegister,
1432 i.TempRegister(0));
1433 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001434 case kSSEUint32ToFloat64:
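      // movl implicitly zero-extends the uint32 into the 64-bit scratch
      // register, so the signed 64-bit conversion below yields the correct
      // unsigned value.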
1435 if (instr->InputAt(0)->IsRegister()) {
1436 __ movl(kScratchRegister, i.InputRegister(0));
1437 } else {
1438 __ movl(kScratchRegister, i.InputOperand(0));
1439 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001440 __ Cvtqsi2sd(i.OutputDoubleRegister(), kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001441 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001442 case kSSEUint32ToFloat32:
1443 if (instr->InputAt(0)->IsRegister()) {
1444 __ movl(kScratchRegister, i.InputRegister(0));
1445 } else {
1446 __ movl(kScratchRegister, i.InputOperand(0));
1447 }
1448 __ Cvtqsi2ss(i.OutputDoubleRegister(), kScratchRegister);
1449 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001450 case kSSEFloat64ExtractLowWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001451 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001452 __ movl(i.OutputRegister(), i.InputOperand(0));
1453 } else {
1454 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
1455 }
1456 break;
1457 case kSSEFloat64ExtractHighWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001458 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001459 __ movl(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
1460 } else {
1461 __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1);
1462 }
1463 break;
1464 case kSSEFloat64InsertLowWord32:
1465 if (instr->InputAt(1)->IsRegister()) {
1466 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 0);
1467 } else {
1468 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0);
1469 }
1470 break;
1471 case kSSEFloat64InsertHighWord32:
1472 if (instr->InputAt(1)->IsRegister()) {
1473 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 1);
1474 } else {
1475 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1);
1476 }
1477 break;
1478 case kSSEFloat64LoadLowWord32:
1479 if (instr->InputAt(0)->IsRegister()) {
1480 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
1481 } else {
1482 __ Movd(i.OutputDoubleRegister(), i.InputOperand(0));
1483 }
1484 break;
1485 case kAVXFloat32Cmp: {
1486 CpuFeatureScope avx_scope(masm(), AVX);
Ben Murdochc5610432016-08-08 18:44:38 +01001487 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001488 __ vucomiss(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1489 } else {
1490 __ vucomiss(i.InputDoubleRegister(0), i.InputOperand(1));
1491 }
1492 break;
1493 }
1494 case kAVXFloat32Add:
1495 ASSEMBLE_AVX_BINOP(vaddss);
1496 break;
1497 case kAVXFloat32Sub:
1498 ASSEMBLE_AVX_BINOP(vsubss);
1499 break;
1500 case kAVXFloat32Mul:
1501 ASSEMBLE_AVX_BINOP(vmulss);
1502 break;
1503 case kAVXFloat32Div:
1504 ASSEMBLE_AVX_BINOP(vdivss);
1505      // Don't delete this mov. It may improve performance on some CPUs
1506 // when there is a (v)mulss depending on the result.
1507 __ Movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
1508 break;
1509 case kAVXFloat32Max:
1510 ASSEMBLE_AVX_BINOP(vmaxss);
1511 break;
1512 case kAVXFloat32Min:
1513 ASSEMBLE_AVX_BINOP(vminss);
1514 break;
1515 case kAVXFloat64Cmp: {
1516 CpuFeatureScope avx_scope(masm(), AVX);
Ben Murdochc5610432016-08-08 18:44:38 +01001517 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001518 __ vucomisd(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1519 } else {
1520 __ vucomisd(i.InputDoubleRegister(0), i.InputOperand(1));
1521 }
1522 break;
1523 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001524 case kAVXFloat64Add:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001525 ASSEMBLE_AVX_BINOP(vaddsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001526 break;
1527 case kAVXFloat64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 ASSEMBLE_AVX_BINOP(vsubsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001529 break;
1530 case kAVXFloat64Mul:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001531 ASSEMBLE_AVX_BINOP(vmulsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001532 break;
1533 case kAVXFloat64Div:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001534 ASSEMBLE_AVX_BINOP(vdivsd);
1535      // Don't delete this mov. It may improve performance on some CPUs
1536 // when there is a (v)mulsd depending on the result.
1537 __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001538 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001539 case kAVXFloat64Max:
1540 ASSEMBLE_AVX_BINOP(vmaxsd);
1541 break;
1542 case kAVXFloat64Min:
1543 ASSEMBLE_AVX_BINOP(vminsd);
1544 break;
1545 case kAVXFloat32Abs: {
1546 // TODO(bmeurer): Use RIP relative 128-bit constants.
1547 CpuFeatureScope avx_scope(masm(), AVX);
1548 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1549 __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 33);
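      // kScratchDoubleReg now holds 0x7FFFFFFF in the low 32 bits of each
      // quadword: the float32 abs mask (the AND below clears the sign bit).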
Ben Murdochc5610432016-08-08 18:44:38 +01001550 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001551 __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
1552 i.InputDoubleRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001553 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001554 __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
1555 i.InputOperand(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001556 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001557 break;
1558 }
1559 case kAVXFloat32Neg: {
1560 // TODO(bmeurer): Use RIP relative 128-bit constants.
1561 CpuFeatureScope avx_scope(masm(), AVX);
1562 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1563 __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 31);
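      // The low 32 bits of each quadword are now 0x80000000; the XOR below
      // flips the float32 sign bit.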
Ben Murdochc5610432016-08-08 18:44:38 +01001564 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001565 __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
1566 i.InputDoubleRegister(0));
1567 } else {
1568 __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
1569 i.InputOperand(0));
1570 }
1571 break;
1572 }
1573 case kAVXFloat64Abs: {
1574 // TODO(bmeurer): Use RIP relative 128-bit constants.
1575 CpuFeatureScope avx_scope(masm(), AVX);
1576 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1577 __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 1);
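      // kScratchDoubleReg now holds 0x7FFFFFFFFFFFFFFF per quadword: the
      // float64 abs mask (the AND below clears the sign bit).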
Ben Murdochc5610432016-08-08 18:44:38 +01001578 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001579 __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1580 i.InputDoubleRegister(0));
1581 } else {
1582 __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1583 i.InputOperand(0));
1584 }
1585 break;
1586 }
1587 case kAVXFloat64Neg: {
1588 // TODO(bmeurer): Use RIP relative 128-bit constants.
1589 CpuFeatureScope avx_scope(masm(), AVX);
1590 __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
1591 __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 63);
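      // kScratchDoubleReg now holds 0x8000000000000000 per quadword; the XOR
      // below flips the float64 sign bit.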
Ben Murdochc5610432016-08-08 18:44:38 +01001592 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001593 __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1594 i.InputDoubleRegister(0));
1595 } else {
1596 __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
1597 i.InputOperand(0));
1598 }
1599 break;
1600 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01001601 case kSSEFloat64SilenceNaN:
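      // Subtracting 0.0 quiets a signalling NaN while leaving every other
      // value (including -0.0) unchanged.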
1602 __ Xorpd(kScratchDoubleReg, kScratchDoubleReg);
1603 __ Subsd(i.InputDoubleRegister(0), kScratchDoubleReg);
1604 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001605 case kX64Movsxbl:
1606 ASSEMBLE_MOVX(movsxbl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001607 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001608 break;
1609 case kX64Movzxbl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610 ASSEMBLE_MOVX(movzxbl);
1611 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001612 break;
1613 case kX64Movb: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001614 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001615 Operand operand = i.MemoryOperand(&index);
1616 if (HasImmediateInput(instr, index)) {
1617 __ movb(operand, Immediate(i.InputInt8(index)));
1618 } else {
1619 __ movb(operand, i.InputRegister(index));
1620 }
1621 break;
1622 }
1623 case kX64Movsxwl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001624 ASSEMBLE_MOVX(movsxwl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001625 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001626 break;
1627 case kX64Movzxwl:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001628 ASSEMBLE_MOVX(movzxwl);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001629 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001630 break;
1631 case kX64Movw: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001633 Operand operand = i.MemoryOperand(&index);
1634 if (HasImmediateInput(instr, index)) {
1635 __ movw(operand, Immediate(i.InputInt16(index)));
1636 } else {
1637 __ movw(operand, i.InputRegister(index));
1638 }
1639 break;
1640 }
1641 case kX64Movl:
1642 if (instr->HasOutput()) {
1643 if (instr->addressing_mode() == kMode_None) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001644 if (instr->InputAt(0)->IsRegister()) {
1645 __ movl(i.OutputRegister(), i.InputRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001646 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001647 __ movl(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001648 }
1649 } else {
1650 __ movl(i.OutputRegister(), i.MemoryOperand());
1651 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001652 __ AssertZeroExtended(i.OutputRegister());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001653 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001654 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001655 Operand operand = i.MemoryOperand(&index);
1656 if (HasImmediateInput(instr, index)) {
1657 __ movl(operand, i.InputImmediate(index));
1658 } else {
1659 __ movl(operand, i.InputRegister(index));
1660 }
1661 }
1662 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001663 case kX64Movsxlq:
1664 ASSEMBLE_MOVX(movsxlq);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001665 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001666 case kX64Movq:
1667 if (instr->HasOutput()) {
1668 __ movq(i.OutputRegister(), i.MemoryOperand());
1669 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001670 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001671 Operand operand = i.MemoryOperand(&index);
1672 if (HasImmediateInput(instr, index)) {
1673 __ movq(operand, i.InputImmediate(index));
1674 } else {
1675 __ movq(operand, i.InputRegister(index));
1676 }
1677 }
1678 break;
1679 case kX64Movss:
1680 if (instr->HasOutput()) {
1681 __ movss(i.OutputDoubleRegister(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001682 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001683 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001684 Operand operand = i.MemoryOperand(&index);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001685 __ movss(operand, i.InputDoubleRegister(index));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001686 }
1687 break;
1688 case kX64Movsd:
1689 if (instr->HasOutput()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001690 __ Movsd(i.OutputDoubleRegister(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001692 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001693 Operand operand = i.MemoryOperand(&index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001694 __ Movsd(operand, i.InputDoubleRegister(index));
1695 }
1696 break;
1697 case kX64BitcastFI:
Ben Murdochc5610432016-08-08 18:44:38 +01001698 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001699 __ movl(i.OutputRegister(), i.InputOperand(0));
1700 } else {
1701 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
1702 }
1703 break;
1704 case kX64BitcastDL:
Ben Murdochc5610432016-08-08 18:44:38 +01001705 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001706 __ movq(i.OutputRegister(), i.InputOperand(0));
1707 } else {
1708 __ Movq(i.OutputRegister(), i.InputDoubleRegister(0));
1709 }
1710 break;
1711 case kX64BitcastIF:
1712 if (instr->InputAt(0)->IsRegister()) {
1713 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
1714 } else {
1715 __ movss(i.OutputDoubleRegister(), i.InputOperand(0));
1716 }
1717 break;
1718 case kX64BitcastLD:
1719 if (instr->InputAt(0)->IsRegister()) {
1720 __ Movq(i.OutputDoubleRegister(), i.InputRegister(0));
1721 } else {
1722 __ Movsd(i.OutputDoubleRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001723 }
1724 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001725 case kX64Lea32: {
1726 AddressingMode mode = AddressingModeField::decode(instr->opcode());
1727 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
1728      // and addressing mode just happen to work out. The "addl"/"subl" forms
1729 // in these cases are faster based on measurements.
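      // Concretely: leal r,[r+imm] becomes addl/subl r,imm; leal r,[r+r] and
      // leal r,[r*2] become shll r,1; leal r,[r*4] and leal r,[r*8] become
      // shll r,2 and shll r,3 respectively (only when the output register is
      // the same as the input).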
1730 if (i.InputRegister(0).is(i.OutputRegister())) {
1731 if (mode == kMode_MRI) {
1732 int32_t constant_summand = i.InputInt32(1);
1733 if (constant_summand > 0) {
1734 __ addl(i.OutputRegister(), Immediate(constant_summand));
1735 } else if (constant_summand < 0) {
1736 __ subl(i.OutputRegister(), Immediate(-constant_summand));
1737 }
1738 } else if (mode == kMode_MR1) {
1739 if (i.InputRegister(1).is(i.OutputRegister())) {
1740 __ shll(i.OutputRegister(), Immediate(1));
1741 } else {
1742 __ leal(i.OutputRegister(), i.MemoryOperand());
1743 }
1744 } else if (mode == kMode_M2) {
1745 __ shll(i.OutputRegister(), Immediate(1));
1746 } else if (mode == kMode_M4) {
1747 __ shll(i.OutputRegister(), Immediate(2));
1748 } else if (mode == kMode_M8) {
1749 __ shll(i.OutputRegister(), Immediate(3));
1750 } else {
1751 __ leal(i.OutputRegister(), i.MemoryOperand());
1752 }
1753 } else {
1754 __ leal(i.OutputRegister(), i.MemoryOperand());
1755 }
1756 __ AssertZeroExtended(i.OutputRegister());
1757 break;
1758 }
1759 case kX64Lea:
1760 __ leaq(i.OutputRegister(), i.MemoryOperand());
1761 break;
1762 case kX64Dec32:
1763 __ decl(i.OutputRegister());
1764 break;
1765 case kX64Inc32:
1766 __ incl(i.OutputRegister());
1767 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001768 case kX64Push:
1769 if (HasImmediateInput(instr, 0)) {
1770 __ pushq(i.InputImmediate(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001771 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001772 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001773 if (instr->InputAt(0)->IsRegister()) {
1774 __ pushq(i.InputRegister(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001775 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochc5610432016-08-08 18:44:38 +01001776 } else if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001777 // TODO(titzer): use another machine instruction?
1778 __ subq(rsp, Immediate(kDoubleSize));
1779 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1780 __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001781 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001782 __ pushq(i.InputOperand(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001783 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001784 }
1785 }
1786 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001787 case kX64Poke: {
1788 int const slot = MiscField::decode(instr->opcode());
1789 if (HasImmediateInput(instr, 0)) {
1790 __ movq(Operand(rsp, slot * kPointerSize), i.InputImmediate(0));
1791 } else {
1792 __ movq(Operand(rsp, slot * kPointerSize), i.InputRegister(0));
1793 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001794 break;
1795 }
Ben Murdochc5610432016-08-08 18:44:38 +01001796 case kX64Xchgb: {
1797 size_t index = 0;
1798 Operand operand = i.MemoryOperand(&index);
1799 __ xchgb(i.InputRegister(index), operand);
1800 break;
1801 }
1802 case kX64Xchgw: {
1803 size_t index = 0;
1804 Operand operand = i.MemoryOperand(&index);
1805 __ xchgw(i.InputRegister(index), operand);
1806 break;
1807 }
1808 case kX64Xchgl: {
1809 size_t index = 0;
1810 Operand operand = i.MemoryOperand(&index);
1811 __ xchgl(i.InputRegister(index), operand);
1812 break;
1813 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001814 case kCheckedLoadInt8:
1815 ASSEMBLE_CHECKED_LOAD_INTEGER(movsxbl);
1816 break;
1817 case kCheckedLoadUint8:
1818 ASSEMBLE_CHECKED_LOAD_INTEGER(movzxbl);
1819 break;
1820 case kCheckedLoadInt16:
1821 ASSEMBLE_CHECKED_LOAD_INTEGER(movsxwl);
1822 break;
1823 case kCheckedLoadUint16:
1824 ASSEMBLE_CHECKED_LOAD_INTEGER(movzxwl);
1825 break;
1826 case kCheckedLoadWord32:
1827 ASSEMBLE_CHECKED_LOAD_INTEGER(movl);
1828 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001829 case kCheckedLoadWord64:
1830 ASSEMBLE_CHECKED_LOAD_INTEGER(movq);
1831 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001832 case kCheckedLoadFloat32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001833 ASSEMBLE_CHECKED_LOAD_FLOAT(Movss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001834 break;
1835 case kCheckedLoadFloat64:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001836 ASSEMBLE_CHECKED_LOAD_FLOAT(Movsd);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001837 break;
1838 case kCheckedStoreWord8:
1839 ASSEMBLE_CHECKED_STORE_INTEGER(movb);
1840 break;
1841 case kCheckedStoreWord16:
1842 ASSEMBLE_CHECKED_STORE_INTEGER(movw);
1843 break;
1844 case kCheckedStoreWord32:
1845 ASSEMBLE_CHECKED_STORE_INTEGER(movl);
1846 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001847 case kCheckedStoreWord64:
1848 ASSEMBLE_CHECKED_STORE_INTEGER(movq);
1849 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001850 case kCheckedStoreFloat32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001851 ASSEMBLE_CHECKED_STORE_FLOAT(Movss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001852 break;
1853 case kCheckedStoreFloat64:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001854 ASSEMBLE_CHECKED_STORE_FLOAT(Movsd);
1855 break;
1856 case kX64StackCheck:
1857 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001858 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001859 case kAtomicLoadInt8:
1860 case kAtomicLoadUint8:
1861 case kAtomicLoadInt16:
1862 case kAtomicLoadUint16:
1863 case kAtomicLoadWord32:
1864 case kAtomicStoreWord8:
1865 case kAtomicStoreWord16:
1866 case kAtomicStoreWord32:
1867 UNREACHABLE(); // Won't be generated by instruction selector.
1868 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001869 }
Ben Murdochc5610432016-08-08 18:44:38 +01001870 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001871} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001872
1873
1874// Assembles branches after this instruction.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001875void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001876 X64OperandConverter i(this, instr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001877 Label::Distance flabel_distance =
1878 branch->fallthru ? Label::kNear : Label::kFar;
1879 Label* tlabel = branch->true_label;
1880 Label* flabel = branch->false_label;
1881 switch (branch->condition) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001882 case kUnorderedEqual:
1883 __ j(parity_even, flabel, flabel_distance);
1884 // Fall through.
1885 case kEqual:
1886 __ j(equal, tlabel);
1887 break;
1888 case kUnorderedNotEqual:
1889 __ j(parity_even, tlabel);
1890 // Fall through.
1891 case kNotEqual:
1892 __ j(not_equal, tlabel);
1893 break;
1894 case kSignedLessThan:
1895 __ j(less, tlabel);
1896 break;
1897 case kSignedGreaterThanOrEqual:
1898 __ j(greater_equal, tlabel);
1899 break;
1900 case kSignedLessThanOrEqual:
1901 __ j(less_equal, tlabel);
1902 break;
1903 case kSignedGreaterThan:
1904 __ j(greater, tlabel);
1905 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001906 case kUnsignedLessThan:
1907 __ j(below, tlabel);
1908 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001909 case kUnsignedGreaterThanOrEqual:
1910 __ j(above_equal, tlabel);
1911 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001912 case kUnsignedLessThanOrEqual:
1913 __ j(below_equal, tlabel);
1914 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001915 case kUnsignedGreaterThan:
1916 __ j(above, tlabel);
1917 break;
1918 case kOverflow:
1919 __ j(overflow, tlabel);
1920 break;
1921 case kNotOverflow:
1922 __ j(no_overflow, tlabel);
1923 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001924 default:
1925 UNREACHABLE();
1926 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001927 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001928 if (!branch->fallthru) __ jmp(flabel, flabel_distance);
1929}
1930
1931
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001932void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001933 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001934}
1935
1936
1937// Assembles boolean materializations after this instruction.
1938void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1939 FlagsCondition condition) {
1940 X64OperandConverter i(this, instr);
1941 Label done;
1942
1943 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1944 // last output of the instruction.
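  // For the unordered conditions, a NaN comparand (parity flag set) skips the
  // setcc and materializes the fixed result directly: 0 for kUnorderedEqual,
  // 1 for kUnorderedNotEqual.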
1945 Label check;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001946 DCHECK_NE(0u, instr->OutputCount());
1947 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948 Condition cc = no_condition;
1949 switch (condition) {
1950 case kUnorderedEqual:
1951 __ j(parity_odd, &check, Label::kNear);
1952 __ movl(reg, Immediate(0));
1953 __ jmp(&done, Label::kNear);
1954 // Fall through.
1955 case kEqual:
1956 cc = equal;
1957 break;
1958 case kUnorderedNotEqual:
1959 __ j(parity_odd, &check, Label::kNear);
1960 __ movl(reg, Immediate(1));
1961 __ jmp(&done, Label::kNear);
1962 // Fall through.
1963 case kNotEqual:
1964 cc = not_equal;
1965 break;
1966 case kSignedLessThan:
1967 cc = less;
1968 break;
1969 case kSignedGreaterThanOrEqual:
1970 cc = greater_equal;
1971 break;
1972 case kSignedLessThanOrEqual:
1973 cc = less_equal;
1974 break;
1975 case kSignedGreaterThan:
1976 cc = greater;
1977 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001978 case kUnsignedLessThan:
1979 cc = below;
1980 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001981 case kUnsignedGreaterThanOrEqual:
1982 cc = above_equal;
1983 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001984 case kUnsignedLessThanOrEqual:
1985 cc = below_equal;
1986 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001987 case kUnsignedGreaterThan:
1988 cc = above;
1989 break;
1990 case kOverflow:
1991 cc = overflow;
1992 break;
1993 case kNotOverflow:
1994 cc = no_overflow;
1995 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001996 default:
1997 UNREACHABLE();
1998 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001999 }
2000 __ bind(&check);
2001 __ setcc(cc, reg);
2002 __ movzxbl(reg, reg);
2003 __ bind(&done);
2004}
2005
2006
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002007void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
2008 X64OperandConverter i(this, instr);
2009 Register input = i.InputRegister(0);
2010 for (size_t index = 2; index < instr->InputCount(); index += 2) {
2011 __ cmpl(input, Immediate(i.InputInt32(index + 0)));
2012 __ j(equal, GetLabel(i.InputRpo(index + 1)));
2013 }
2014 AssembleArchJump(i.InputRpo(1));
2015}
2016
2017
2018void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
2019 X64OperandConverter i(this, instr);
2020 Register input = i.InputRegister(0);
2021 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
2022 Label** cases = zone()->NewArray<Label*>(case_count);
2023 for (int32_t index = 0; index < case_count; ++index) {
2024 cases[index] = GetLabel(i.InputRpo(index + 2));
2025 }
2026 Label* const table = AddJumpTable(cases, case_count);
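  // Bounds check: inputs >= case_count branch to the default target
  // (InputRpo(1)); in-range inputs jump through the 8-byte jump table entries.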
2027 __ cmpl(input, Immediate(case_count));
2028 __ j(above_equal, GetLabel(i.InputRpo(1)));
2029 __ leaq(kScratchRegister, Operand(table));
2030 __ jmp(Operand(kScratchRegister, input, times_8, 0));
2031}
2032
Ben Murdochc5610432016-08-08 18:44:38 +01002033CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002034 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002035 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002036 isolate(), deoptimization_id, bailout_type);
Ben Murdochc5610432016-08-08 18:44:38 +01002037 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002038 __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochc5610432016-08-08 18:44:38 +01002039 return kSuccess;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002040}
2041
2042
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002043namespace {
2044
2045static const int kQuadWordSize = 16;
2046
2047} // namespace
2048
Ben Murdochc5610432016-08-08 18:44:38 +01002049void CodeGenerator::FinishFrame(Frame* frame) {
2050 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002051
Ben Murdochc5610432016-08-08 18:44:38 +01002052 const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
2053  if (saves_fp != 0) {  // Save callee-saved XMM registers.
2054    frame->AlignSavedCalleeRegisterSlots();
2055    const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
2056    frame->AllocateSavedCalleeRegisterSlots(saves_fp_count *
2057                                            (kQuadWordSize / kPointerSize));
2058  }
2061 const RegList saves = descriptor->CalleeSavedRegisters();
2062 if (saves != 0) { // Save callee-saved registers.
2063 int count = 0;
2064 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
2065 if (((1 << i) & saves)) {
2066 ++count;
2067 }
2068 }
2069 frame->AllocateSavedCalleeRegisterSlots(count);
2070 }
2071}
2072
2073void CodeGenerator::AssembleConstructFrame() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002074 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdochda12d292016-06-02 14:46:10 +01002075 if (frame_access_state()->has_frame()) {
2076 if (descriptor->IsCFunctionCall()) {
2077 __ pushq(rbp);
2078 __ movq(rbp, rsp);
2079 } else if (descriptor->IsJSFunctionCall()) {
2080 __ Prologue(this->info()->GeneratePreagedPrologue());
2081 } else {
2082 __ StubPrologue(info()->GetOutputStackFrameType());
2083 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002084 }
Ben Murdochc5610432016-08-08 18:44:38 +01002085 int shrink_slots = frame()->GetSpillSlotCount();
2086
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002087 if (info()->is_osr()) {
2088 // TurboFan OSR-compiled functions cannot be entered directly.
2089 __ Abort(kShouldNotDirectlyEnterOsrFunction);
2090
2091 // Unoptimized code jumps directly to this entrypoint while the unoptimized
2092 // frame is still on the stack. Optimized code uses OSR values directly from
2093 // the unoptimized frame. Thus, all that needs to be done is to allocate the
2094 // remaining stack slots.
2095 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
2096 osr_pc_offset_ = __ pc_offset();
Ben Murdochc5610432016-08-08 18:44:38 +01002097 shrink_slots -= static_cast<int>(OsrHelper(info()).UnoptimizedFrameSlots());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002098 }
2099
2100 const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
Ben Murdochc5610432016-08-08 18:44:38 +01002101 if (shrink_slots > 0) {
2102 __ subq(rsp, Immediate(shrink_slots * kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002103 }
2104
2105 if (saves_fp != 0) { // Save callee-saved XMM registers.
2106 const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
2107 const int stack_size = saves_fp_count * kQuadWordSize;
2108 // Adjust the stack pointer.
2109 __ subp(rsp, Immediate(stack_size));
2110 // Store the registers on the stack.
2111 int slot_idx = 0;
2112 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
2113 if (!((1 << i) & saves_fp)) continue;
2114 __ movdqu(Operand(rsp, kQuadWordSize * slot_idx),
2115 XMMRegister::from_code(i));
2116 slot_idx++;
2117 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002118 }
2119
2120 const RegList saves = descriptor->CalleeSavedRegisters();
2121 if (saves != 0) { // Save callee-saved registers.
2122 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
2123 if (!((1 << i) & saves)) continue;
2124 __ pushq(Register::from_code(i));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002125 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002126 }
2127}
2128
2129
2130void CodeGenerator::AssembleReturn() {
2131 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002132
2133 // Restore registers.
2134 const RegList saves = descriptor->CalleeSavedRegisters();
2135 if (saves != 0) {
2136 for (int i = 0; i < Register::kNumRegisters; i++) {
2137 if (!((1 << i) & saves)) continue;
2138 __ popq(Register::from_code(i));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002139 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 }
2141 const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
2142 if (saves_fp != 0) {
2143 const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
2144 const int stack_size = saves_fp_count * kQuadWordSize;
2145 // Load the registers from the stack.
2146 int slot_idx = 0;
2147 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
2148 if (!((1 << i) & saves_fp)) continue;
2149 __ movdqu(XMMRegister::from_code(i),
2150 Operand(rsp, kQuadWordSize * slot_idx));
2151 slot_idx++;
2152 }
2153 // Adjust the stack pointer.
2154 __ addp(rsp, Immediate(stack_size));
2155 }
2156
2157 if (descriptor->IsCFunctionCall()) {
Ben Murdochda12d292016-06-02 14:46:10 +01002158 AssembleDeconstructFrame();
2159 } else if (frame_access_state()->has_frame()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002160 // Canonicalize JSFunction return sites for now.
2161 if (return_label_.is_bound()) {
2162 __ jmp(&return_label_);
2163 return;
2164 } else {
2165 __ bind(&return_label_);
Ben Murdochda12d292016-06-02 14:46:10 +01002166 AssembleDeconstructFrame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002167 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002168 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002169 size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
2170 // Might need rcx for scratch if pop_size is too big.
2171 DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
2172 __ Ret(static_cast<int>(pop_size), rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002173}
2174
2175
2176void CodeGenerator::AssembleMove(InstructionOperand* source,
2177 InstructionOperand* destination) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002178 X64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002179 // Dispatch on the source and destination operand kinds. Not all
2180 // combinations are possible.
2181 if (source->IsRegister()) {
2182 DCHECK(destination->IsRegister() || destination->IsStackSlot());
2183 Register src = g.ToRegister(source);
2184 if (destination->IsRegister()) {
2185 __ movq(g.ToRegister(destination), src);
2186 } else {
2187 __ movq(g.ToOperand(destination), src);
2188 }
2189 } else if (source->IsStackSlot()) {
2190 DCHECK(destination->IsRegister() || destination->IsStackSlot());
2191 Operand src = g.ToOperand(source);
2192 if (destination->IsRegister()) {
2193 Register dst = g.ToRegister(destination);
2194 __ movq(dst, src);
2195 } else {
2196      // Use the scratch register as a temporary for memory-to-memory
2197      // moves.
2198 Register tmp = kScratchRegister;
2199 Operand dst = g.ToOperand(destination);
2200 __ movq(tmp, src);
2201 __ movq(dst, tmp);
2202 }
2203 } else if (source->IsConstant()) {
2204 ConstantOperand* constant_source = ConstantOperand::cast(source);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002205 Constant src = g.ToConstant(constant_source);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002206 if (destination->IsRegister() || destination->IsStackSlot()) {
2207 Register dst = destination->IsRegister() ? g.ToRegister(destination)
2208 : kScratchRegister;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002209 switch (src.type()) {
Ben Murdochc5610432016-08-08 18:44:38 +01002210 case Constant::kInt32: {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002211 if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
2212 src.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
Ben Murdochc5610432016-08-08 18:44:38 +01002213 __ movq(dst, src.ToInt64(), src.rmode());
2214 } else {
2215 // TODO(dcarney): don't need scratch in this case.
2216 int32_t value = src.ToInt32();
2217 if (value == 0) {
2218 __ xorl(dst, dst);
2219 } else {
2220 __ movl(dst, Immediate(value));
2221 }
2222 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002223 break;
Ben Murdochc5610432016-08-08 18:44:38 +01002224 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002225 case Constant::kInt64:
Ben Murdoch61f157c2016-09-16 13:49:30 +01002226 if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
2227 src.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
Ben Murdochc5610432016-08-08 18:44:38 +01002228 __ movq(dst, src.ToInt64(), src.rmode());
2229 } else {
2230 DCHECK(src.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
2231 __ Set(dst, src.ToInt64());
2232 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002233 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002234 case Constant::kFloat32:
2235 __ Move(dst,
2236 isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
2237 break;
2238 case Constant::kFloat64:
2239 __ Move(dst,
2240 isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
2241 break;
2242 case Constant::kExternalReference:
2243 __ Move(dst, src.ToExternalReference());
2244 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002245 case Constant::kHeapObject: {
2246 Handle<HeapObject> src_object = src.ToHeapObject();
2247 Heap::RootListIndex index;
Ben Murdochda12d292016-06-02 14:46:10 +01002248 int slot;
2249 if (IsMaterializableFromFrame(src_object, &slot)) {
2250 __ movp(dst, g.SlotToOperand(slot));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 } else if (IsMaterializableFromRoot(src_object, &index)) {
2252 __ LoadRoot(dst, index);
2253 } else {
2254 __ Move(dst, src_object);
2255 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002256 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002258 case Constant::kRpoNumber:
2259 UNREACHABLE(); // TODO(dcarney): load of labels on x64.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002260 break;
2261 }
2262 if (destination->IsStackSlot()) {
2263 __ movq(g.ToOperand(destination), kScratchRegister);
2264 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002265 } else if (src.type() == Constant::kFloat32) {
2266 // TODO(turbofan): Can we do better here?
2267 uint32_t src_const = bit_cast<uint32_t>(src.ToFloat32());
Ben Murdochc5610432016-08-08 18:44:38 +01002268 if (destination->IsFPRegister()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002269 __ Move(g.ToDoubleRegister(destination), src_const);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002270 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01002271 DCHECK(destination->IsFPStackSlot());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002272 Operand dst = g.ToOperand(destination);
2273 __ movl(dst, Immediate(src_const));
2274 }
2275 } else {
2276 DCHECK_EQ(Constant::kFloat64, src.type());
2277 uint64_t src_const = bit_cast<uint64_t>(src.ToFloat64());
Ben Murdochc5610432016-08-08 18:44:38 +01002278 if (destination->IsFPRegister()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002279 __ Move(g.ToDoubleRegister(destination), src_const);
2280 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01002281 DCHECK(destination->IsFPStackSlot());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002282 __ movq(kScratchRegister, src_const);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002283 __ movq(g.ToOperand(destination), kScratchRegister);
2284 }
2285 }
Ben Murdochc5610432016-08-08 18:44:38 +01002286 } else if (source->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002287 XMMRegister src = g.ToDoubleRegister(source);
Ben Murdochc5610432016-08-08 18:44:38 +01002288 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002289 XMMRegister dst = g.ToDoubleRegister(destination);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002290 __ Movapd(dst, src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002291 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01002292 DCHECK(destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002293 Operand dst = g.ToOperand(destination);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 __ Movsd(dst, src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002295 }
Ben Murdochc5610432016-08-08 18:44:38 +01002296 } else if (source->IsFPStackSlot()) {
2297 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002298 Operand src = g.ToOperand(source);
Ben Murdochc5610432016-08-08 18:44:38 +01002299 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002300 XMMRegister dst = g.ToDoubleRegister(destination);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002301 __ Movsd(dst, src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002302 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002303 Operand dst = g.ToOperand(destination);
Ben Murdoch61f157c2016-09-16 13:49:30 +01002304 __ Movsd(kScratchDoubleReg, src);
2305 __ Movsd(dst, kScratchDoubleReg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002306 }
2307 } else {
2308 UNREACHABLE();
2309 }
2310}
2311
2312
2313void CodeGenerator::AssembleSwap(InstructionOperand* source,
2314 InstructionOperand* destination) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002315 X64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002316 // Dispatch on the source and destination operand kinds. Not all
2317 // combinations are possible.
2318 if (source->IsRegister() && destination->IsRegister()) {
2319 // Register-register.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002320 Register src = g.ToRegister(source);
2321 Register dst = g.ToRegister(destination);
2322 __ movq(kScratchRegister, src);
2323 __ movq(src, dst);
2324 __ movq(dst, kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002325 } else if (source->IsRegister() && destination->IsStackSlot()) {
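    // Register <-> stack slot swap without a second scratch register: push the
    // register, load the slot into it, then pop the saved value into the slot
    // (the slot operand is recomputed because the push changed the stack
    // height).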
2326 Register src = g.ToRegister(source);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002327 __ pushq(src);
2328 frame_access_state()->IncreaseSPDelta(1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002329 Operand dst = g.ToOperand(destination);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002330 __ movq(src, dst);
2331 frame_access_state()->IncreaseSPDelta(-1);
2332 dst = g.ToOperand(destination);
2333 __ popq(dst);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002334 } else if ((source->IsStackSlot() && destination->IsStackSlot()) ||
Ben Murdochc5610432016-08-08 18:44:38 +01002335 (source->IsFPStackSlot() && destination->IsFPStackSlot())) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002336 // Memory-memory.
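    // kScratchRegister holds the destination value while the source value
    // travels via the stack (push/pop), so no second scratch register is
    // needed.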
2337 Register tmp = kScratchRegister;
2338 Operand src = g.ToOperand(source);
2339 Operand dst = g.ToOperand(destination);
2340 __ movq(tmp, dst);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002341 __ pushq(src);
2342 frame_access_state()->IncreaseSPDelta(1);
2343 src = g.ToOperand(source);
2344 __ movq(src, tmp);
2345 frame_access_state()->IncreaseSPDelta(-1);
2346 dst = g.ToOperand(destination);
2347 __ popq(dst);
Ben Murdochc5610432016-08-08 18:44:38 +01002348 } else if (source->IsFPRegister() && destination->IsFPRegister()) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002349 // XMM register-register swap.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002350 XMMRegister src = g.ToDoubleRegister(source);
2351 XMMRegister dst = g.ToDoubleRegister(destination);
Ben Murdoch61f157c2016-09-16 13:49:30 +01002352 __ Movapd(kScratchDoubleReg, src);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002353 __ Movapd(src, dst);
Ben Murdoch61f157c2016-09-16 13:49:30 +01002354 __ Movapd(dst, kScratchDoubleReg);
Ben Murdochc5610432016-08-08 18:44:38 +01002355 } else if (source->IsFPRegister() && destination->IsFPStackSlot()) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002356 // XMM register-memory swap.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002357 XMMRegister src = g.ToDoubleRegister(source);
2358 Operand dst = g.ToOperand(destination);
Ben Murdoch61f157c2016-09-16 13:49:30 +01002359 __ Movsd(kScratchDoubleReg, src);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002360 __ Movsd(src, dst);
Ben Murdoch61f157c2016-09-16 13:49:30 +01002361 __ Movsd(dst, kScratchDoubleReg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002362 } else {
2363 // No other combinations are possible.
2364 UNREACHABLE();
2365 }
2366}
2367
2368
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002369void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2370 for (size_t index = 0; index < target_count; ++index) {
2371 __ dq(targets[index]);
2372 }
2373}
2374
2375
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002376void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002377 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2378 return;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002379 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002380
2381 int space_needed = Deoptimizer::patch_size();
2382 // Ensure that we have enough space after the previous lazy-bailout
2383 // instruction for patching the code here.
2384 int current_pc = masm()->pc_offset();
2385 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2386 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2387 __ Nop(padding_size);
2388 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002389}
2390
2391#undef __
2392
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002393} // namespace compiler
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002394} // namespace internal
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002395} // namespace v8