// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/x64/assembler-x64.h"
#include "src/x64/macro-assembler-x64.h"

namespace v8 {
namespace internal {
namespace compiler {

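// Convenient shorthand for the macro assembler, used by all the emission
// code below.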
#define __ masm()->


#define kScratchDoubleReg xmm0


// Adds X64-specific methods for decoding operands.
class X64OperandConverter : public InstructionOperandConverter {
 public:
  X64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  Immediate InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index, int extra = 0) {
    return ToOperand(instr_->InputAt(index), extra);
  }

  Operand OutputOperand() { return ToOperand(instr_->Output()); }

  Immediate ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    if (constant.type() == Constant::kFloat64) {
      DCHECK_EQ(0, bit_cast<int64_t>(constant.ToFloat64()));
      return Immediate(0);
    }
    return Immediate(constant.ToInt32());
  }

  Operand ToOperand(InstructionOperand* op, int extra = 0) {
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
  }

  Operand SlotToOperand(int slot_index, int extra = 0) {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot_index);
    return Operand(offset.from_stack_pointer() ? rsp : rbp,
                   offset.offset() + extra);
  }

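  // Returns the current input slot and advances |offset| past it; the
  // addressing-mode decoding below consumes base, index, and displacement
  // inputs in order through this helper.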
  static size_t NextOffset(size_t* offset) {
    size_t i = *offset;
    (*offset)++;
    return i;
  }

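  // The scaled addressing modes are declared in scale order, so the scale
  // factor is simply the mode's distance from its times_1 variant; the
  // STATIC_ASSERTs below pin down the ScaleFactor encoding this relies on.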
  static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
    STATIC_ASSERT(0 == static_cast<int>(times_1));
    STATIC_ASSERT(1 == static_cast<int>(times_2));
    STATIC_ASSERT(2 == static_cast<int>(times_4));
    STATIC_ASSERT(3 == static_cast<int>(times_8));
    int scale = static_cast<int>(mode - one);
    DCHECK(scale >= 0 && scale < 4);
    return static_cast<ScaleFactor>(scale);
  }

  Operand MemoryOperand(size_t* offset) {
    AddressingMode mode = AddressingModeField::decode(instr_->opcode());
    switch (mode) {
      case kMode_MR: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_MRI: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, disp);
      }
      case kMode_MR1:
      case kMode_MR2:
      case kMode_MR4:
      case kMode_MR8: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1, mode);
        int32_t disp = 0;
        return Operand(base, index, scale, disp);
      }
      case kMode_MR1I:
      case kMode_MR2I:
      case kMode_MR4I:
      case kMode_MR8I: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, index, scale, disp);
      }
      case kMode_M1: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_M2:
        UNREACHABLE();  // Should use kMode_MR with the more compact encoding.
        return Operand(no_reg, 0);
      case kMode_M4:
      case kMode_M8: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1, mode);
        int32_t disp = 0;
        return Operand(index, scale, disp);
      }
      case kMode_M1I:
      case kMode_M2I:
      case kMode_M4I:
      case kMode_M8I: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(index, scale, disp);
      }
      case kMode_None:
        UNREACHABLE();
        return Operand(no_reg, 0);
    }
    UNREACHABLE();
    return Operand(no_reg, 0);
  }

  Operand MemoryOperand(size_t first_input = 0) {
    return MemoryOperand(&first_input);
  }
};


namespace {

bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}


class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ xorl(result_, result_); }

 private:
  Register const result_;
};


class OutOfLineLoadNaN final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN(CodeGenerator* gen, XMMRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ Pcmpeqd(result_, result_); }

 private:
  XMMRegister const result_;
};


class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
 public:
  OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
                             XMMRegister input)
      : OutOfLineCode(gen), result_(result), input_(input) {}

  void Generate() final {
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(MemOperand(rsp, 0), input_);
    __ SlowTruncateToI(result_, rsp, 0);
    __ addp(rsp, Immediate(kDoubleSize));
  }

 private:
  Register const result_;
  XMMRegister const input_;
};

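// Out-of-line portion of the write barrier: skips the stub for smi values
// and for values on pages whose incoming pointers are not interesting, then
// computes the slot address and calls the RecordWriteStub.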
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        operand_(operand),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, zero,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ leap(scratch1_, operand_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Operand const operand_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

}  // namespace


#define ASSEMBLE_UNOP(asm_instr)         \
  do {                                   \
    if (instr->Output()->IsRegister()) { \
      __ asm_instr(i.OutputRegister());  \
    } else {                             \
      __ asm_instr(i.OutputOperand());   \
    }                                    \
  } while (0)


#define ASSEMBLE_BINOP(asm_instr)                              \
  do {                                                         \
    if (HasImmediateInput(instr, 1)) {                         \
      if (instr->InputAt(0)->IsRegister()) {                   \
        __ asm_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else {                                                 \
        __ asm_instr(i.InputOperand(0), i.InputImmediate(1));  \
      }                                                        \
    } else {                                                   \
      if (instr->InputAt(1)->IsRegister()) {                   \
        __ asm_instr(i.InputRegister(0), i.InputRegister(1));  \
      } else {                                                 \
        __ asm_instr(i.InputRegister(0), i.InputOperand(1));   \
      }                                                        \
    }                                                          \
  } while (0)

#define ASSEMBLE_COMPARE(asm_instr)                                   \
  do {                                                                \
    if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
      size_t index = 0;                                               \
      Operand left = i.MemoryOperand(&index);                         \
      if (HasImmediateInput(instr, index)) {                          \
        __ asm_instr(left, i.InputImmediate(index));                  \
      } else {                                                        \
        __ asm_instr(left, i.InputRegister(index));                   \
      }                                                               \
    } else {                                                          \
      if (HasImmediateInput(instr, 1)) {                              \
        if (instr->InputAt(0)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputImmediate(1));      \
        } else {                                                      \
          __ asm_instr(i.InputOperand(0), i.InputImmediate(1));       \
        }                                                             \
      } else {                                                        \
        if (instr->InputAt(1)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputRegister(1));       \
        } else {                                                      \
          __ asm_instr(i.InputRegister(0), i.InputOperand(1));        \
        }                                                             \
      }                                                               \
    }                                                                 \
  } while (0)

#define ASSEMBLE_MULT(asm_instr)                              \
  do {                                                        \
    if (HasImmediateInput(instr, 1)) {                        \
      if (instr->InputAt(0)->IsRegister()) {                  \
        __ asm_instr(i.OutputRegister(), i.InputRegister(0),  \
                     i.InputImmediate(1));                    \
      } else {                                                \
        __ asm_instr(i.OutputRegister(), i.InputOperand(0),   \
                     i.InputImmediate(1));                    \
      }                                                       \
    } else {                                                  \
      if (instr->InputAt(1)->IsRegister()) {                  \
        __ asm_instr(i.OutputRegister(), i.InputRegister(1)); \
      } else {                                                \
        __ asm_instr(i.OutputRegister(), i.InputOperand(1));  \
      }                                                       \
    }                                                         \
  } while (0)


#define ASSEMBLE_SHIFT(asm_instr, width)                                   \
  do {                                                                     \
    if (HasImmediateInput(instr, 1)) {                                     \
      if (instr->Output()->IsRegister()) {                                 \
        __ asm_instr(i.OutputRegister(), Immediate(i.InputInt##width(1))); \
      } else {                                                             \
        __ asm_instr(i.OutputOperand(), Immediate(i.InputInt##width(1)));  \
      }                                                                    \
    } else {                                                               \
      if (instr->Output()->IsRegister()) {                                 \
        __ asm_instr##_cl(i.OutputRegister());                             \
      } else {                                                             \
        __ asm_instr##_cl(i.OutputOperand());                              \
      }                                                                    \
    }                                                                      \
  } while (0)


#define ASSEMBLE_MOVX(asm_instr)                            \
  do {                                                      \
    if (instr->addressing_mode() != kMode_None) {           \
      __ asm_instr(i.OutputRegister(), i.MemoryOperand());  \
    } else if (instr->InputAt(0)->IsRegister()) {           \
      __ asm_instr(i.OutputRegister(), i.InputRegister(0)); \
    } else {                                                \
      __ asm_instr(i.OutputRegister(), i.InputOperand(0));  \
    }                                                       \
  } while (0)


#define ASSEMBLE_SSE_BINOP(asm_instr)                                   \
  do {                                                                  \
    if (instr->InputAt(1)->IsDoubleRegister()) {                        \
      __ asm_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    } else {                                                            \
      __ asm_instr(i.InputDoubleRegister(0), i.InputOperand(1));        \
    }                                                                   \
  } while (0)


#define ASSEMBLE_SSE_UNOP(asm_instr)                                    \
  do {                                                                  \
    if (instr->InputAt(0)->IsDoubleRegister()) {                        \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \
    } else {                                                            \
      __ asm_instr(i.OutputDoubleRegister(), i.InputOperand(0));        \
    }                                                                   \
  } while (0)


#define ASSEMBLE_AVX_BINOP(asm_instr)                                  \
  do {                                                                 \
    CpuFeatureScope avx_scope(masm(), AVX);                            \
    if (instr->InputAt(1)->IsDoubleRegister()) {                       \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputDoubleRegister(1));                          \
    } else {                                                           \
      __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                   i.InputOperand(1));                                 \
    }                                                                  \
  } while (0)

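// The checked load/store macros below emit a bounds check of index1 + index2
// against the length input (register or immediate). Out-of-range loads branch
// to an out-of-line stub producing NaN (floats) or zero (integers);
// out-of-range stores are simply skipped.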
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr)                               \
  do {                                                                       \
    auto result = i.OutputDoubleRegister();                                  \
    auto buffer = i.InputRegister(0);                                        \
    auto index1 = i.InputRegister(1);                                        \
    auto index2 = i.InputInt32(2);                                           \
    OutOfLineCode* ool;                                                      \
    if (instr->InputAt(3)->IsRegister()) {                                   \
      auto length = i.InputRegister(3);                                      \
      DCHECK_EQ(0, index2);                                                  \
      __ cmpl(index1, length);                                               \
      ool = new (zone()) OutOfLineLoadNaN(this, result);                     \
    } else {                                                                 \
      auto length = i.InputInt32(3);                                         \
      DCHECK_LE(index2, length);                                             \
      __ cmpq(index1, Immediate(length - index2));                           \
      class OutOfLineLoadFloat final : public OutOfLineCode {                \
       public:                                                               \
        OutOfLineLoadFloat(CodeGenerator* gen, XMMRegister result,           \
                           Register buffer, Register index1, int32_t index2, \
                           int32_t length)                                   \
            : OutOfLineCode(gen),                                            \
              result_(result),                                               \
              buffer_(buffer),                                               \
              index1_(index1),                                               \
              index2_(index2),                                               \
              length_(length) {}                                             \
                                                                             \
        void Generate() final {                                              \
          __ leal(kScratchRegister, Operand(index1_, index2_));              \
          __ Pcmpeqd(result_, result_);                                      \
          __ cmpl(kScratchRegister, Immediate(length_));                     \
          __ j(above_equal, exit());                                         \
          __ asm_instr(result_,                                              \
                       Operand(buffer_, kScratchRegister, times_1, 0));      \
        }                                                                    \
                                                                             \
       private:                                                              \
        XMMRegister const result_;                                           \
        Register const buffer_;                                              \
        Register const index1_;                                              \
        int32_t const index2_;                                               \
        int32_t const length_;                                               \
      };                                                                     \
      ool = new (zone())                                                     \
          OutOfLineLoadFloat(this, result, buffer, index1, index2, length);  \
    }                                                                        \
    __ j(above_equal, ool->entry());                                         \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2));          \
    __ bind(ool->exit());                                                    \
  } while (false)


#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                               \
  do {                                                                         \
    auto result = i.OutputRegister();                                          \
    auto buffer = i.InputRegister(0);                                          \
    auto index1 = i.InputRegister(1);                                          \
    auto index2 = i.InputInt32(2);                                             \
    OutOfLineCode* ool;                                                        \
    if (instr->InputAt(3)->IsRegister()) {                                     \
      auto length = i.InputRegister(3);                                        \
      DCHECK_EQ(0, index2);                                                    \
      __ cmpl(index1, length);                                                 \
      ool = new (zone()) OutOfLineLoadZero(this, result);                      \
    } else {                                                                   \
      auto length = i.InputInt32(3);                                           \
      DCHECK_LE(index2, length);                                               \
      __ cmpq(index1, Immediate(length - index2));                             \
      class OutOfLineLoadInteger final : public OutOfLineCode {                \
       public:                                                                 \
        OutOfLineLoadInteger(CodeGenerator* gen, Register result,              \
                             Register buffer, Register index1, int32_t index2, \
                             int32_t length)                                   \
            : OutOfLineCode(gen),                                              \
              result_(result),                                                 \
              buffer_(buffer),                                                 \
              index1_(index1),                                                 \
              index2_(index2),                                                 \
              length_(length) {}                                               \
                                                                               \
        void Generate() final {                                                \
          Label oob;                                                           \
          __ leal(kScratchRegister, Operand(index1_, index2_));                \
          __ cmpl(kScratchRegister, Immediate(length_));                       \
          __ j(above_equal, &oob, Label::kNear);                               \
          __ asm_instr(result_,                                                \
                       Operand(buffer_, kScratchRegister, times_1, 0));        \
          __ jmp(exit());                                                      \
          __ bind(&oob);                                                       \
          __ xorl(result_, result_);                                           \
        }                                                                      \
                                                                               \
       private:                                                                \
        Register const result_;                                                \
        Register const buffer_;                                                \
        Register const index1_;                                                \
        int32_t const index2_;                                                 \
        int32_t const length_;                                                 \
      };                                                                       \
      ool = new (zone())                                                       \
          OutOfLineLoadInteger(this, result, buffer, index1, index2, length);  \
    }                                                                          \
    __ j(above_equal, ool->entry());                                           \
    __ asm_instr(result, Operand(buffer, index1, times_1, index2));            \
    __ bind(ool->exit());                                                      \
  } while (false)


#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr)                              \
  do {                                                                       \
    auto buffer = i.InputRegister(0);                                        \
    auto index1 = i.InputRegister(1);                                        \
    auto index2 = i.InputInt32(2);                                           \
    auto value = i.InputDoubleRegister(4);                                   \
    if (instr->InputAt(3)->IsRegister()) {                                   \
      auto length = i.InputRegister(3);                                      \
      DCHECK_EQ(0, index2);                                                  \
      Label done;                                                            \
      __ cmpl(index1, length);                                               \
      __ j(above_equal, &done, Label::kNear);                                \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);         \
      __ bind(&done);                                                        \
    } else {                                                                 \
      auto length = i.InputInt32(3);                                         \
      DCHECK_LE(index2, length);                                             \
      __ cmpq(index1, Immediate(length - index2));                           \
      class OutOfLineStoreFloat final : public OutOfLineCode {               \
       public:                                                               \
        OutOfLineStoreFloat(CodeGenerator* gen, Register buffer,             \
                            Register index1, int32_t index2, int32_t length, \
                            XMMRegister value)                               \
            : OutOfLineCode(gen),                                            \
              buffer_(buffer),                                               \
              index1_(index1),                                               \
              index2_(index2),                                               \
              length_(length),                                               \
              value_(value) {}                                               \
                                                                             \
        void Generate() final {                                              \
          __ leal(kScratchRegister, Operand(index1_, index2_));              \
          __ cmpl(kScratchRegister, Immediate(length_));                     \
          __ j(above_equal, exit());                                         \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0),       \
                       value_);                                              \
        }                                                                    \
                                                                             \
       private:                                                              \
        Register const buffer_;                                              \
        Register const index1_;                                              \
        int32_t const index2_;                                               \
        int32_t const length_;                                               \
        XMMRegister const value_;                                            \
      };                                                                     \
      auto ool = new (zone())                                                \
          OutOfLineStoreFloat(this, buffer, index1, index2, length, value);  \
      __ j(above_equal, ool->entry());                                       \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);         \
      __ bind(ool->exit());                                                  \
    }                                                                        \
  } while (false)


#define ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Value)                  \
  do {                                                                         \
    auto buffer = i.InputRegister(0);                                          \
    auto index1 = i.InputRegister(1);                                          \
    auto index2 = i.InputInt32(2);                                             \
    if (instr->InputAt(3)->IsRegister()) {                                     \
      auto length = i.InputRegister(3);                                        \
      DCHECK_EQ(0, index2);                                                    \
      Label done;                                                              \
      __ cmpl(index1, length);                                                 \
      __ j(above_equal, &done, Label::kNear);                                  \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);           \
      __ bind(&done);                                                          \
    } else {                                                                   \
      auto length = i.InputInt32(3);                                           \
      DCHECK_LE(index2, length);                                               \
      __ cmpq(index1, Immediate(length - index2));                             \
      class OutOfLineStoreInteger final : public OutOfLineCode {               \
       public:                                                                 \
        OutOfLineStoreInteger(CodeGenerator* gen, Register buffer,             \
                              Register index1, int32_t index2, int32_t length, \
                              Value value)                                     \
            : OutOfLineCode(gen),                                              \
              buffer_(buffer),                                                 \
              index1_(index1),                                                 \
              index2_(index2),                                                 \
              length_(length),                                                 \
              value_(value) {}                                                 \
                                                                               \
        void Generate() final {                                                \
          __ leal(kScratchRegister, Operand(index1_, index2_));                \
          __ cmpl(kScratchRegister, Immediate(length_));                       \
          __ j(above_equal, exit());                                           \
          __ asm_instr(Operand(buffer_, kScratchRegister, times_1, 0),         \
                       value_);                                                \
        }                                                                      \
                                                                               \
       private:                                                                \
        Register const buffer_;                                                \
        Register const index1_;                                                \
        int32_t const index2_;                                                 \
        int32_t const length_;                                                 \
        Value const value_;                                                    \
      };                                                                       \
      auto ool = new (zone())                                                  \
          OutOfLineStoreInteger(this, buffer, index1, index2, length, value);  \
      __ j(above_equal, ool->entry());                                         \
      __ asm_instr(Operand(buffer, index1, times_1, index2), value);           \
      __ bind(ool->exit());                                                    \
    }                                                                          \
  } while (false)


#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)                \
  do {                                                           \
    if (instr->InputAt(4)->IsRegister()) {                       \
      Register value = i.InputRegister(4);                       \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Register);  \
    } else {                                                     \
      Immediate value = i.InputImmediate(4);                     \
      ASSEMBLE_CHECKED_STORE_INTEGER_IMPL(asm_instr, Immediate); \
    }                                                            \
  } while (false)

void CodeGenerator::AssembleDeconstructFrame() {
  __ movq(rsp, rbp);
  __ popq(rbp);
}

void CodeGenerator::AssembleSetupStackPointer() {}

void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ addq(rsp, Immediate(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ subq(rsp, Immediate(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ movq(rbp, MemOperand(rbp, 0));
  }
  frame_access_state()->SetFrameAccessToSP();
}

void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if the current frame is an arguments adaptor frame.
  __ Cmp(Operand(rbp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &done, Label::kNear);

  // Load the arguments count from the current arguments adaptor frame (note
  // that it does not include the receiver).
  Register caller_args_count_reg = scratch1;
  __ SmiToInteger32(
      caller_args_count_reg,
      Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack);
  __ bind(&done);
}

// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  X64OperandConverter i(this, instr);
  InstructionCode opcode = instr->opcode();
  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
  switch (arch_opcode) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ Call(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ call(reg);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ jmp(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ jmp(reg);
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      __ Call(FieldOperand(func, JSFunction::kCodeEntryOffset));
      frame_access_state()->ClearSPDelta();
      RecordCallPosition(instr);
      break;
    }
    case kArchTailCallJSFunctionFromJSFunction:
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters);
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (HasImmediateInput(instr, 0)) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ movq(i.OutputRegister(), rsp);
      break;
    case kArchFramePointer:
      __ movq(i.OutputRegister(), rbp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ movq(i.OutputRegister(), Operand(rbp, 0));
      } else {
        __ movq(i.OutputRegister(), rbp);
      }
      break;
    case kArchTruncateDoubleToI: {
      auto result = i.OutputRegister();
      auto input = i.InputDoubleRegister(0);
      auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input);
      __ Cvttsd2siq(result, input);
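      // Cvttsd2siq produces 0x8000000000000000 (INT64_MIN) for overflow and
      // NaN inputs; comparing against 1 sets the overflow flag exactly for
      // that value, routing such inputs to the out-of-line slow path.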
      __ cmpq(result, Immediate(1));
      __ j(overflow, ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      Register value = i.InputRegister(index);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
                                                   scratch0, scratch1, mode);
      __ movp(operand, value);
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask,
                       not_zero, ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      Register base;
      if (offset.from_stack_pointer()) {
        base = rsp;
      } else {
        base = rbp;
      }
      __ leaq(i.OutputRegister(), Operand(base, offset.offset()));
      break;
    }
    case kX64Add32:
      ASSEMBLE_BINOP(addl);
      break;
    case kX64Add:
      ASSEMBLE_BINOP(addq);
      break;
    case kX64Sub32:
      ASSEMBLE_BINOP(subl);
      break;
    case kX64Sub:
      ASSEMBLE_BINOP(subq);
      break;
    case kX64And32:
      ASSEMBLE_BINOP(andl);
      break;
    case kX64And:
      ASSEMBLE_BINOP(andq);
      break;
    case kX64Cmp8:
      ASSEMBLE_COMPARE(cmpb);
      break;
    case kX64Cmp16:
      ASSEMBLE_COMPARE(cmpw);
      break;
    case kX64Cmp32:
      ASSEMBLE_COMPARE(cmpl);
      break;
    case kX64Cmp:
      ASSEMBLE_COMPARE(cmpq);
      break;
    case kX64Test8:
      ASSEMBLE_COMPARE(testb);
      break;
    case kX64Test16:
      ASSEMBLE_COMPARE(testw);
      break;
    case kX64Test32:
      ASSEMBLE_COMPARE(testl);
      break;
    case kX64Test:
      ASSEMBLE_COMPARE(testq);
      break;
    case kX64Imul32:
      ASSEMBLE_MULT(imull);
      break;
    case kX64Imul:
      ASSEMBLE_MULT(imulq);
      break;
    case kX64ImulHigh32:
      if (instr->InputAt(1)->IsRegister()) {
        __ imull(i.InputRegister(1));
      } else {
        __ imull(i.InputOperand(1));
      }
      break;
    case kX64UmulHigh32:
      if (instr->InputAt(1)->IsRegister()) {
        __ mull(i.InputRegister(1));
      } else {
        __ mull(i.InputOperand(1));
      }
      break;
    case kX64Idiv32:
      __ cdq();
      __ idivl(i.InputRegister(1));
      break;
    case kX64Idiv:
      __ cqo();
      __ idivq(i.InputRegister(1));
      break;
    case kX64Udiv32:
      __ xorl(rdx, rdx);
      __ divl(i.InputRegister(1));
      break;
    case kX64Udiv:
      __ xorq(rdx, rdx);
      __ divq(i.InputRegister(1));
      break;
    case kX64Not:
      ASSEMBLE_UNOP(notq);
      break;
    case kX64Not32:
      ASSEMBLE_UNOP(notl);
      break;
    case kX64Neg:
      ASSEMBLE_UNOP(negq);
      break;
    case kX64Neg32:
      ASSEMBLE_UNOP(negl);
      break;
    case kX64Or32:
      ASSEMBLE_BINOP(orl);
      break;
    case kX64Or:
      ASSEMBLE_BINOP(orq);
      break;
    case kX64Xor32:
      ASSEMBLE_BINOP(xorl);
      break;
    case kX64Xor:
      ASSEMBLE_BINOP(xorq);
      break;
    case kX64Shl32:
      ASSEMBLE_SHIFT(shll, 5);
      break;
    case kX64Shl:
      ASSEMBLE_SHIFT(shlq, 6);
      break;
    case kX64Shr32:
      ASSEMBLE_SHIFT(shrl, 5);
      break;
    case kX64Shr:
      ASSEMBLE_SHIFT(shrq, 6);
      break;
    case kX64Sar32:
      ASSEMBLE_SHIFT(sarl, 5);
      break;
    case kX64Sar:
      ASSEMBLE_SHIFT(sarq, 6);
      break;
    case kX64Ror32:
      ASSEMBLE_SHIFT(rorl, 5);
      break;
    case kX64Ror:
      ASSEMBLE_SHIFT(rorq, 6);
      break;
    case kX64Lzcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Lzcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Lzcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Lzcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Lzcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Lzcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Tzcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Tzcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Tzcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Tzcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Tzcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Tzcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Popcnt:
      if (instr->InputAt(0)->IsRegister()) {
        __ Popcntq(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Popcntq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kX64Popcnt32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Popcntl(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ Popcntl(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat32Cmp:
      ASSEMBLE_SSE_BINOP(Ucomiss);
      break;
    case kSSEFloat32Add:
      ASSEMBLE_SSE_BINOP(addss);
      break;
    case kSSEFloat32Sub:
      ASSEMBLE_SSE_BINOP(subss);
      break;
    case kSSEFloat32Mul:
      ASSEMBLE_SSE_BINOP(mulss);
      break;
    case kSSEFloat32Div:
      ASSEMBLE_SSE_BINOP(divss);
      // Don't delete this mov. It may improve performance on some CPUs
      // when there is a (v)mulss depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat32Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
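      // All-ones via pcmpeqd, shifted right by 33, leaves 0x7fffffff in the
      // low word of each lane: a mask that clears the float's sign bit.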
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 33);
      __ andps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
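      // All-ones shifted left by 31 leaves 0x80000000 in the low 32 bits, so
      // the xorps below flips the scalar float's sign bit.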
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 31);
      __ xorps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Sqrt:
      ASSEMBLE_SSE_UNOP(sqrtss);
      break;
    case kSSEFloat32Max:
      ASSEMBLE_SSE_BINOP(maxss);
      break;
    case kSSEFloat32Min:
      ASSEMBLE_SSE_BINOP(minss);
      break;
    case kSSEFloat32ToFloat64:
      ASSEMBLE_SSE_UNOP(Cvtss2sd);
      break;
    case kSSEFloat32Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ Roundss(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat32ToInt32:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttss2si(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2si(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat32ToUint32: {
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    }
    case kSSEFloat64Cmp:
      ASSEMBLE_SSE_BINOP(Ucomisd);
      break;
    case kSSEFloat64Add:
      ASSEMBLE_SSE_BINOP(addsd);
      break;
    case kSSEFloat64Sub:
      ASSEMBLE_SSE_BINOP(subsd);
      break;
    case kSSEFloat64Mul:
      ASSEMBLE_SSE_BINOP(mulsd);
      break;
    case kSSEFloat64Div:
      ASSEMBLE_SSE_BINOP(divsd);
      // Don't delete this mov. It may improve performance on some CPUs
      // when there is a (v)mulsd depending on the result.
      __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat64Mod: {
      __ subq(rsp, Immediate(kDoubleSize));
      // Move values to st(0) and st(1).
      __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(1));
      __ fld_d(Operand(rsp, 0));
      __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
      __ fld_d(Operand(rsp, 0));
      // Loop while fprem isn't done.
      Label mod_loop;
      __ bind(&mod_loop);
      // This instruction traps on all kinds of inputs, but we are assuming
      // the floating point control word is set to ignore them all.
      __ fprem();
      // The following two instructions implicitly use rax.
      __ fnstsw_ax();
      if (CpuFeatures::IsSupported(SAHF)) {
        CpuFeatureScope sahf_scope(masm(), SAHF);
        __ sahf();
      } else {
        __ shrl(rax, Immediate(8));
        __ andl(rax, Immediate(0xFF));
        __ pushq(rax);
        __ popfq();
      }
      __ j(parity_even, &mod_loop);
      // Move output to stack and clean up.
      __ fstp(1);
      __ fstp_d(Operand(rsp, 0));
      __ Movsd(i.OutputDoubleRegister(), Operand(rsp, 0));
      __ addq(rsp, Immediate(kDoubleSize));
      break;
    }
    case kSSEFloat64Max:
      ASSEMBLE_SSE_BINOP(maxsd);
      break;
    case kSSEFloat64Min:
      ASSEMBLE_SSE_BINOP(minsd);
      break;
    case kSSEFloat64Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 1);
      __ andpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 63);
      __ xorpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Sqrt:
      ASSEMBLE_SSE_UNOP(sqrtsd);
      break;
    case kSSEFloat64Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ Roundsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat64ToFloat32:
      ASSEMBLE_SSE_UNOP(Cvtsd2ss);
      break;
    case kSSEFloat64ToInt32:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttsd2si(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2si(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kSSEFloat64ToUint32: {
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
      }
      if (MiscField::decode(instr->opcode())) {
        __ AssertZeroExtended(i.OutputRegister());
      }
      break;
    }
    case kSSEFloat32ToInt64:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
        Label done;
        Label fail;
        __ Move(kScratchDoubleReg, static_cast<float>(INT64_MIN));
        if (instr->InputAt(0)->IsDoubleRegister()) {
          __ Ucomiss(kScratchDoubleReg, i.InputDoubleRegister(0));
        } else {
          __ Ucomiss(kScratchDoubleReg, i.InputOperand(0));
        }
        // If the input is NaN, then the conversion fails.
        __ j(parity_even, &fail);
        // If the input is INT64_MIN, then the conversion succeeds.
        __ j(equal, &done);
        __ cmpq(i.OutputRegister(0), Immediate(1));
        // If the conversion results in INT64_MIN, but the input was not
        // INT64_MIN, then the conversion fails.
        __ j(no_overflow, &done);
        __ bind(&fail);
        __ Set(i.OutputRegister(1), 0);
        __ bind(&done);
      }
      break;
    case kSSEFloat64ToInt64:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttsd2siq(i.OutputRegister(0), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(0), i.InputOperand(0));
      }
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
        Label done;
        Label fail;
        __ Move(kScratchDoubleReg, static_cast<double>(INT64_MIN));
        if (instr->InputAt(0)->IsDoubleRegister()) {
          __ Ucomisd(kScratchDoubleReg, i.InputDoubleRegister(0));
        } else {
          __ Ucomisd(kScratchDoubleReg, i.InputOperand(0));
        }
        // If the input is NaN, then the conversion fails.
        __ j(parity_even, &fail);
        // If the input is INT64_MIN, then the conversion succeeds.
        __ j(equal, &done);
        __ cmpq(i.OutputRegister(0), Immediate(1));
        // If the conversion results in INT64_MIN, but the input was not
        // INT64_MIN, then the conversion fails.
        __ j(no_overflow, &done);
        __ bind(&fail);
        __ Set(i.OutputRegister(1), 0);
        __ bind(&done);
      }
      break;
    case kSSEFloat32ToUint64: {
      Label done;
      Label success;
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 0);
      }
      // There is no Float32ToUint64 instruction, so we have to use the
      // Float32ToInt64 instruction instead.
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttss2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttss2siq(i.OutputRegister(), i.InputOperand(0));
      }
      // If the result of the Float32ToInt64 conversion is positive, we are
      // already done.
      __ testq(i.OutputRegister(), i.OutputRegister());
      __ j(positive, &success);
      // The result of the first conversion was negative, which means that the
      // input value was not within the positive int64 range. We subtract 2^63
      // and convert again to see if the input is within the uint64 range.
      __ Move(kScratchDoubleReg, -9223372036854775808.0f);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ addss(kScratchDoubleReg, i.InputDoubleRegister(0));
      } else {
        __ addss(kScratchDoubleReg, i.InputOperand(0));
      }
      __ Cvttss2siq(i.OutputRegister(), kScratchDoubleReg);
      __ testq(i.OutputRegister(), i.OutputRegister());
      // The only possible negative value here is 0x8000000000000000, which is
      // used on x64 to indicate an integer overflow.
      __ j(negative, &done);
      // The input value is within uint64 range and the second conversion
      // succeeded, but we still have to undo the subtraction we did earlier.
      __ Set(kScratchRegister, 0x8000000000000000);
      __ orq(i.OutputRegister(), kScratchRegister);
      __ bind(&success);
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
      }
      __ bind(&done);
      break;
    }
    case kSSEFloat64ToUint64: {
      Label done;
      Label success;
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 0);
      }
      // There is no Float64ToUint64 instruction, so we have to use the
      // Float64ToInt64 instruction instead.
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ Cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
      } else {
        __ Cvttsd2siq(i.OutputRegister(), i.InputOperand(0));
      }
      // If the result of the Float64ToInt64 conversion is positive, we are
      // already done.
      __ testq(i.OutputRegister(), i.OutputRegister());
      __ j(positive, &success);
      // The result of the first conversion was negative, which means that the
      // input value was not within the positive int64 range. We subtract 2^63
      // and convert again to see if the input is within the uint64 range.
      __ Move(kScratchDoubleReg, -9223372036854775808.0);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ addsd(kScratchDoubleReg, i.InputDoubleRegister(0));
      } else {
        __ addsd(kScratchDoubleReg, i.InputOperand(0));
      }
      __ Cvttsd2siq(i.OutputRegister(), kScratchDoubleReg);
      __ testq(i.OutputRegister(), i.OutputRegister());
      // The only possible negative value here is 0x8000000000000000, which is
      // used on x64 to indicate an integer overflow.
      __ j(negative, &done);
      // The input value is within uint64 range and the second conversion
      // succeeded, but we still have to undo the subtraction we did earlier.
      __ Set(kScratchRegister, 0x8000000000000000);
      __ orq(i.OutputRegister(), kScratchRegister);
      __ bind(&success);
      if (instr->OutputCount() > 1) {
        __ Set(i.OutputRegister(1), 1);
      }
      __ bind(&done);
      break;
    }
    case kSSEInt32ToFloat64:
      if (instr->InputAt(0)->IsRegister()) {
        __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Cvtlsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kSSEInt32ToFloat32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Cvtlsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kSSEInt64ToFloat32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Cvtqsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kSSEInt64ToFloat64:
      if (instr->InputAt(0)->IsRegister()) {
        __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Cvtqsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kSSEUint64ToFloat32:
      if (instr->InputAt(0)->IsRegister()) {
        __ movq(kScratchRegister, i.InputRegister(0));
      } else {
        __ movq(kScratchRegister, i.InputOperand(0));
      }
      __ Cvtqui2ss(i.OutputDoubleRegister(), kScratchRegister,
                   i.TempRegister(0));
      break;
    case kSSEUint64ToFloat64:
      if (instr->InputAt(0)->IsRegister()) {
        __ movq(kScratchRegister, i.InputRegister(0));
      } else {
        __ movq(kScratchRegister, i.InputOperand(0));
      }
      __ Cvtqui2sd(i.OutputDoubleRegister(), kScratchRegister,
                   i.TempRegister(0));
      break;
    case kSSEUint32ToFloat64:
      if (instr->InputAt(0)->IsRegister()) {
        __ movl(kScratchRegister, i.InputRegister(0));
      } else {
        __ movl(kScratchRegister, i.InputOperand(0));
      }
      __ Cvtqsi2sd(i.OutputDoubleRegister(), kScratchRegister);
      break;
    case kSSEUint32ToFloat32:
      if (instr->InputAt(0)->IsRegister()) {
        __ movl(kScratchRegister, i.InputRegister(0));
      } else {
        __ movl(kScratchRegister, i.InputOperand(0));
      }
      __ Cvtqsi2ss(i.OutputDoubleRegister(), kScratchRegister);
      break;
    case kSSEFloat64ExtractLowWord32:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ movl(i.OutputRegister(), i.InputOperand(0));
      } else {
        __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
      }
      break;
    case kSSEFloat64ExtractHighWord32:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ movl(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
      } else {
        __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1);
      }
      break;
    case kSSEFloat64InsertLowWord32:
      if (instr->InputAt(1)->IsRegister()) {
        __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 0);
      } else {
        __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0);
      }
      break;
    case kSSEFloat64InsertHighWord32:
      if (instr->InputAt(1)->IsRegister()) {
        __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 1);
      } else {
        __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1);
      }
      break;
    case kSSEFloat64LoadLowWord32:
      if (instr->InputAt(0)->IsRegister()) {
        __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Movd(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kAVXFloat32Cmp: {
      CpuFeatureScope avx_scope(masm(), AVX);
      if (instr->InputAt(1)->IsDoubleRegister()) {
        __ vucomiss(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
      } else {
        __ vucomiss(i.InputDoubleRegister(0), i.InputOperand(1));
      }
      break;
    }
    case kAVXFloat32Add:
      ASSEMBLE_AVX_BINOP(vaddss);
      break;
    case kAVXFloat32Sub:
      ASSEMBLE_AVX_BINOP(vsubss);
      break;
    case kAVXFloat32Mul:
      ASSEMBLE_AVX_BINOP(vmulss);
      break;
    case kAVXFloat32Div:
      ASSEMBLE_AVX_BINOP(vdivss);
      // Don't delete this mov. It may improve performance on some CPUs
      // when there is a (v)mulss depending on the result.
      __ Movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kAVXFloat32Max:
      ASSEMBLE_AVX_BINOP(vmaxss);
      break;
    case kAVXFloat32Min:
      ASSEMBLE_AVX_BINOP(vminss);
      break;
    case kAVXFloat64Cmp: {
      CpuFeatureScope avx_scope(masm(), AVX);
      if (instr->InputAt(1)->IsDoubleRegister()) {
        __ vucomisd(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
      } else {
        __ vucomisd(i.InputDoubleRegister(0), i.InputOperand(1));
      }
      break;
    }
    case kAVXFloat64Add:
      ASSEMBLE_AVX_BINOP(vaddsd);
      break;
    case kAVXFloat64Sub:
      ASSEMBLE_AVX_BINOP(vsubsd);
      break;
    case kAVXFloat64Mul:
      ASSEMBLE_AVX_BINOP(vmulsd);
      break;
    case kAVXFloat64Div:
      ASSEMBLE_AVX_BINOP(vdivsd);
      // Don't delete this mov. It may improve performance on some CPUs
      // when there is a (v)mulsd depending on the result.
1458 __ Movapd(i.OutputDoubleRegister(), i.OutputDoubleRegister());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001459 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001460 case kAVXFloat64Max:
1461 ASSEMBLE_AVX_BINOP(vmaxsd);
1462 break;
1463 case kAVXFloat64Min:
1464 ASSEMBLE_AVX_BINOP(vminsd);
1465 break;
    case kAVXFloat32Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
      __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 33);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputDoubleRegister(0));
      } else {
        __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputOperand(0));
      }
      break;
    }
    case kAVXFloat32Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
      __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 31);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputDoubleRegister(0));
      } else {
        __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputOperand(0));
      }
      break;
    }
    case kAVXFloat64Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
      __ vpsrlq(kScratchDoubleReg, kScratchDoubleReg, 1);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputDoubleRegister(0));
      } else {
        __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputOperand(0));
      }
      break;
    }
    case kAVXFloat64Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vpcmpeqd(kScratchDoubleReg, kScratchDoubleReg, kScratchDoubleReg);
      __ vpsllq(kScratchDoubleReg, kScratchDoubleReg, 63);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputDoubleRegister(0));
      } else {
        __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg,
                  i.InputOperand(0));
      }
      break;
    }
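    // The narrow loads below write 32-bit registers, which on x64 implicitly
    // clears the upper 32 bits; AssertZeroExtended checks that invariant in
    // debug code.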
    case kX64Movsxbl:
      ASSEMBLE_MOVX(movsxbl);
      __ AssertZeroExtended(i.OutputRegister());
      break;
    case kX64Movzxbl:
      ASSEMBLE_MOVX(movzxbl);
      __ AssertZeroExtended(i.OutputRegister());
      break;
    case kX64Movb: {
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      if (HasImmediateInput(instr, index)) {
        __ movb(operand, Immediate(i.InputInt8(index)));
      } else {
        __ movb(operand, i.InputRegister(index));
      }
      break;
    }
    case kX64Movsxwl:
      ASSEMBLE_MOVX(movsxwl);
      __ AssertZeroExtended(i.OutputRegister());
      break;
    case kX64Movzxwl:
      ASSEMBLE_MOVX(movzxwl);
      __ AssertZeroExtended(i.OutputRegister());
      break;
    case kX64Movw: {
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      if (HasImmediateInput(instr, index)) {
        __ movw(operand, Immediate(i.InputInt16(index)));
      } else {
        __ movw(operand, i.InputRegister(index));
      }
      break;
    }
    case kX64Movl:
      if (instr->HasOutput()) {
        if (instr->addressing_mode() == kMode_None) {
          if (instr->InputAt(0)->IsRegister()) {
            __ movl(i.OutputRegister(), i.InputRegister(0));
          } else {
            __ movl(i.OutputRegister(), i.InputOperand(0));
          }
        } else {
          __ movl(i.OutputRegister(), i.MemoryOperand());
        }
        __ AssertZeroExtended(i.OutputRegister());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        if (HasImmediateInput(instr, index)) {
          __ movl(operand, i.InputImmediate(index));
        } else {
          __ movl(operand, i.InputRegister(index));
        }
      }
      break;
    case kX64Movsxlq:
      ASSEMBLE_MOVX(movsxlq);
      break;
    case kX64Movq:
      if (instr->HasOutput()) {
        __ movq(i.OutputRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        if (HasImmediateInput(instr, index)) {
          __ movq(operand, i.InputImmediate(index));
        } else {
          __ movq(operand, i.InputRegister(index));
        }
      }
      break;
    case kX64Movss:
      if (instr->HasOutput()) {
        __ movss(i.OutputDoubleRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        __ movss(operand, i.InputDoubleRegister(index));
      }
      break;
    case kX64Movsd:
      if (instr->HasOutput()) {
        __ Movsd(i.OutputDoubleRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        __ Movsd(operand, i.InputDoubleRegister(index));
      }
      break;
    case kX64BitcastFI:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ movl(i.OutputRegister(), i.InputOperand(0));
      } else {
        __ Movd(i.OutputRegister(), i.InputDoubleRegister(0));
      }
      break;
    case kX64BitcastDL:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ movq(i.OutputRegister(), i.InputOperand(0));
      } else {
        __ Movq(i.OutputRegister(), i.InputDoubleRegister(0));
      }
      break;
    case kX64BitcastIF:
      if (instr->InputAt(0)->IsRegister()) {
        __ Movd(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ movss(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kX64BitcastLD:
      if (instr->InputAt(0)->IsRegister()) {
        __ Movq(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ Movsd(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kX64Lea32: {
      AddressingMode mode = AddressingModeField::decode(instr->opcode());
      // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
      // and addressing mode just happen to work out. The "addl"/"subl" forms
      // in these cases are faster based on measurements.
      if (i.InputRegister(0).is(i.OutputRegister())) {
        if (mode == kMode_MRI) {
          int32_t constant_summand = i.InputInt32(1);
          if (constant_summand > 0) {
            __ addl(i.OutputRegister(), Immediate(constant_summand));
          } else if (constant_summand < 0) {
            __ subl(i.OutputRegister(), Immediate(-constant_summand));
          }
        } else if (mode == kMode_MR1) {
          if (i.InputRegister(1).is(i.OutputRegister())) {
            __ shll(i.OutputRegister(), Immediate(1));
          } else {
            __ leal(i.OutputRegister(), i.MemoryOperand());
          }
        } else if (mode == kMode_M2) {
          __ shll(i.OutputRegister(), Immediate(1));
        } else if (mode == kMode_M4) {
          __ shll(i.OutputRegister(), Immediate(2));
        } else if (mode == kMode_M8) {
          __ shll(i.OutputRegister(), Immediate(3));
        } else {
          __ leal(i.OutputRegister(), i.MemoryOperand());
        }
      } else {
        __ leal(i.OutputRegister(), i.MemoryOperand());
      }
      __ AssertZeroExtended(i.OutputRegister());
      break;
    }
    case kX64Lea:
      __ leaq(i.OutputRegister(), i.MemoryOperand());
      break;
    case kX64Dec32:
      __ decl(i.OutputRegister());
      break;
    case kX64Inc32:
      __ incl(i.OutputRegister());
      break;
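    // kX64Push and kX64Poke write the outgoing parameter area. Each push
    // updates frame_access_state() so that subsequent stack-slot operands are
    // resolved against the correct rsp-relative offset.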
    case kX64Push:
      if (HasImmediateInput(instr, 0)) {
        __ pushq(i.InputImmediate(0));
        frame_access_state()->IncreaseSPDelta(1);
      } else {
        if (instr->InputAt(0)->IsRegister()) {
          __ pushq(i.InputRegister(0));
          frame_access_state()->IncreaseSPDelta(1);
        } else if (instr->InputAt(0)->IsDoubleRegister()) {
          // TODO(titzer): use another machine instruction?
          __ subq(rsp, Immediate(kDoubleSize));
          frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
          __ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
        } else {
          __ pushq(i.InputOperand(0));
          frame_access_state()->IncreaseSPDelta(1);
        }
      }
      break;
    case kX64Poke: {
      int const slot = MiscField::decode(instr->opcode());
      if (HasImmediateInput(instr, 0)) {
        __ movq(Operand(rsp, slot * kPointerSize), i.InputImmediate(0));
      } else {
        __ movq(Operand(rsp, slot * kPointerSize), i.InputRegister(0));
      }
      break;
    }
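    // Checked memory accesses: the ASSEMBLE_CHECKED_* macros (defined earlier
    // in this file) emit a bounds check on the index and branch to
    // out-of-line code instead of performing the access when it is out of
    // range.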
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movsxbl);
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movzxbl);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movsxwl);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movzxwl);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movl);
      break;
    case kCheckedLoadWord64:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movq);
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Movss);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Movsd);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(movb);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(movw);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(movl);
      break;
    case kCheckedStoreWord64:
      ASSEMBLE_CHECKED_STORE_INTEGER(movq);
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT(Movss);
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_FLOAT(Movsd);
      break;
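    // kX64StackCheck only sets the condition flags; the branch on the
    // stack-limit comparison is emitted by the instruction's flags
    // continuation (see AssembleArchBranch below).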
    case kX64StackCheck:
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      break;
  }
}  // NOLINT(readability/fn_size)


// Assembles branches after this instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr,
                                       BranchInfo* branch) {
  X64OperandConverter i(this, instr);
  Label::Distance flabel_distance =
      branch->fallthru ? Label::kNear : Label::kFar;
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  switch (branch->condition) {
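    // ucomiss/ucomisd set the parity flag for unordered (NaN) operands, so
    // the kUnordered* conditions dispatch on parity before falling through to
    // the ordered comparison.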
    case kUnorderedEqual:
      __ j(parity_even, flabel, flabel_distance);
    // Fall through.
    case kEqual:
      __ j(equal, tlabel);
      break;
    case kUnorderedNotEqual:
      __ j(parity_even, tlabel);
    // Fall through.
    case kNotEqual:
      __ j(not_equal, tlabel);
      break;
    case kSignedLessThan:
      __ j(less, tlabel);
      break;
    case kSignedGreaterThanOrEqual:
      __ j(greater_equal, tlabel);
      break;
    case kSignedLessThanOrEqual:
      __ j(less_equal, tlabel);
      break;
    case kSignedGreaterThan:
      __ j(greater, tlabel);
      break;
    case kUnsignedLessThan:
      __ j(below, tlabel);
      break;
    case kUnsignedGreaterThanOrEqual:
      __ j(above_equal, tlabel);
      break;
    case kUnsignedLessThanOrEqual:
      __ j(below_equal, tlabel);
      break;
    case kUnsignedGreaterThan:
      __ j(above, tlabel);
      break;
    case kOverflow:
      __ j(overflow, tlabel);
      break;
    case kNotOverflow:
      __ j(no_overflow, tlabel);
      break;
    default:
      UNREACHABLE();
      break;
  }
  if (!branch->fallthru) __ jmp(flabel, flabel_distance);
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}


// Assembles boolean materializations after this instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  X64OperandConverter i(this, instr);
  Label done;

  // Materialize a full 64-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label check;
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = no_condition;
  switch (condition) {
    case kUnorderedEqual:
      __ j(parity_odd, &check, Label::kNear);
      __ movl(reg, Immediate(0));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kEqual:
      cc = equal;
      break;
    case kUnorderedNotEqual:
      __ j(parity_odd, &check, Label::kNear);
      __ movl(reg, Immediate(1));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kNotEqual:
      cc = not_equal;
      break;
    case kSignedLessThan:
      cc = less;
      break;
    case kSignedGreaterThanOrEqual:
      cc = greater_equal;
      break;
    case kSignedLessThanOrEqual:
      cc = less_equal;
      break;
    case kSignedGreaterThan:
      cc = greater;
      break;
    case kUnsignedLessThan:
      cc = below;
      break;
    case kUnsignedGreaterThanOrEqual:
      cc = above_equal;
      break;
    case kUnsignedLessThanOrEqual:
      cc = below_equal;
      break;
    case kUnsignedGreaterThan:
      cc = above;
      break;
    case kOverflow:
      cc = overflow;
      break;
    case kNotOverflow:
      cc = no_overflow;
      break;
    default:
      UNREACHABLE();
      break;
  }
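  // setcc only writes the low byte of |reg|, so zero-extend it with movzxbl
  // to materialize a full-width 0 or 1.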
  __ bind(&check);
  __ setcc(cc, reg);
  __ movzxbl(reg, reg);
  __ bind(&done);
}


void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  X64OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ cmpl(input, Immediate(i.InputInt32(index + 0)));
    __ j(equal, GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}


void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  X64OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
  Label** cases = zone()->NewArray<Label*>(case_count);
  for (int32_t index = 0; index < case_count; ++index) {
    cases[index] = GetLabel(i.InputRpo(index + 2));
  }
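  // Bounds-check the input and dispatch through a table of 8-byte label
  // addresses; out-of-range inputs jump to the default target (input 1).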
  Label* const table = AddJumpTable(cases, case_count);
  __ cmpl(input, Immediate(case_count));
  __ j(above_equal, GetLabel(i.InputRpo(1)));
  __ leaq(kScratchRegister, Operand(table));
  __ jmp(Operand(kScratchRegister, input, times_8, 0));
}


void CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}


namespace {

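// Size in bytes of a saved XMM register slot; see the movdqu spills in
// AssemblePrologue and AssembleReturn below.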
static const int kQuadWordSize = 16;

}  // namespace


void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      __ pushq(rbp);
      __ movq(rbp, rsp);
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      __ StubPrologue(info()->GetOutputStackFrameType());
    }
  }
  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the
    // unoptimized frame is still on the stack. Optimized code uses OSR values
    // directly from the unoptimized frame. Thus, all that needs to be done is
    // to allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    stack_shrink_slots -=
        static_cast<int>(OsrHelper(info()).UnoptimizedFrameSlots());
  }

  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    __ subq(rsp, Immediate(stack_shrink_slots * kPointerSize));
  }

  if (saves_fp != 0) {  // Save callee-saved XMM registers.
    const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
    const int stack_size = saves_fp_count * kQuadWordSize;
    // Adjust the stack pointer.
    __ subp(rsp, Immediate(stack_size));
    // Store the registers on the stack.
    int slot_idx = 0;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      if (!((1 << i) & saves_fp)) continue;
      __ movdqu(Operand(rsp, kQuadWordSize * slot_idx),
                XMMRegister::from_code(i));
      slot_idx++;
    }
    frame()->AllocateSavedCalleeRegisterSlots(saves_fp_count *
                                              (kQuadWordSize / kPointerSize));
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {  // Save callee-saved registers.
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      __ pushq(Register::from_code(i));
      frame()->AllocateSavedCalleeRegisterSlots(1);
    }
  }
}


void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    for (int i = 0; i < Register::kNumRegisters; i++) {
      if (!((1 << i) & saves)) continue;
      __ popq(Register::from_code(i));
    }
  }
  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    const uint32_t saves_fp_count = base::bits::CountPopulation32(saves_fp);
    const int stack_size = saves_fp_count * kQuadWordSize;
    // Load the registers from the stack.
    int slot_idx = 0;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      if (!((1 << i) & saves_fp)) continue;
      __ movdqu(XMMRegister::from_code(i),
                Operand(rsp, kQuadWordSize * slot_idx));
      slot_idx++;
    }
    // Adjust the stack pointer.
    __ addp(rsp, Immediate(stack_size));
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ jmp(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
  // Might need rcx for scratch if pop_size is too big.
  DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
  __ Ret(static_cast<int>(pop_size), rcx);
}


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ movq(g.ToRegister(destination), src);
    } else {
      __ movq(g.ToOperand(destination), src);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ movq(dst, src);
    } else {
      // Spill on demand to use a temporary register for memory-to-memory
      // moves.
      Register tmp = kScratchRegister;
      Operand dst = g.ToOperand(destination);
      __ movq(tmp, src);
      __ movq(dst, tmp);
    }
  } else if (source->IsConstant()) {
    ConstantOperand* constant_source = ConstantOperand::cast(source);
    Constant src = g.ToConstant(constant_source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : kScratchRegister;
      switch (src.type()) {
        case Constant::kInt32:
          // TODO(dcarney): don't need scratch in this case.
          __ Set(dst, src.ToInt32());
          break;
        case Constant::kInt64:
          __ Set(dst, src.ToInt64());
          break;
        case Constant::kFloat32:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ Move(dst, src.ToExternalReference());
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ movp(dst, g.SlotToOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): load of labels on x64.
          break;
      }
      if (destination->IsStackSlot()) {
        __ movq(g.ToOperand(destination), kScratchRegister);
      }
    } else if (src.type() == Constant::kFloat32) {
      // TODO(turbofan): Can we do better here?
      uint32_t src_const = bit_cast<uint32_t>(src.ToFloat32());
      if (destination->IsDoubleRegister()) {
        __ Move(g.ToDoubleRegister(destination), src_const);
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        Operand dst = g.ToOperand(destination);
        __ movl(dst, Immediate(src_const));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      uint64_t src_const = bit_cast<uint64_t>(src.ToFloat64());
      if (destination->IsDoubleRegister()) {
        __ Move(g.ToDoubleRegister(destination), src_const);
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        __ movq(kScratchRegister, src_const);
        __ movq(g.ToOperand(destination), kScratchRegister);
      }
    }
  } else if (source->IsDoubleRegister()) {
    XMMRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ Movapd(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      Operand dst = g.ToOperand(destination);
      __ Movsd(dst, src);
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ Movsd(dst, src);
    } else {
      // We rely on having xmm0 available as a fixed scratch register.
      Operand dst = g.ToOperand(destination);
      __ Movsd(xmm0, src);
      __ Movsd(dst, xmm0);
    }
  } else {
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = g.ToRegister(source);
    Register dst = g.ToRegister(destination);
    __ movq(kScratchRegister, src);
    __ movq(src, dst);
    __ movq(dst, kScratchRegister);
  } else if (source->IsRegister() && destination->IsStackSlot()) {
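    // Register-memory swap: spill the register, load the slot into it, then
    // pop the saved value into the slot. The SP delta is kept in sync so that
    // g.ToOperand() resolves the frame slot correctly while the push is live,
    // and again for the pop, whose memory operand is addressed with rsp
    // already incremented.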
    Register src = g.ToRegister(source);
    __ pushq(src);
    frame_access_state()->IncreaseSPDelta(1);
    Operand dst = g.ToOperand(destination);
    __ movq(src, dst);
    frame_access_state()->IncreaseSPDelta(-1);
    dst = g.ToOperand(destination);
    __ popq(dst);
  } else if ((source->IsStackSlot() && destination->IsStackSlot()) ||
             (source->IsDoubleStackSlot() &&
              destination->IsDoubleStackSlot())) {
    // Memory-memory.
    Register tmp = kScratchRegister;
    Operand src = g.ToOperand(source);
    Operand dst = g.ToOperand(destination);
    __ movq(tmp, dst);
    __ pushq(src);
    frame_access_state()->IncreaseSPDelta(1);
    src = g.ToOperand(source);
    __ movq(src, tmp);
    frame_access_state()->IncreaseSPDelta(-1);
    dst = g.ToOperand(destination);
    __ popq(dst);
  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
    // XMM register-register swap. We rely on having xmm0
    // available as a fixed scratch register.
    XMMRegister src = g.ToDoubleRegister(source);
    XMMRegister dst = g.ToDoubleRegister(destination);
    __ Movapd(xmm0, src);
    __ Movapd(src, dst);
    __ Movapd(dst, xmm0);
  } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) {
    // XMM register-memory swap. We rely on having xmm0
    // available as a fixed scratch register.
    XMMRegister src = g.ToDoubleRegister(source);
    Operand dst = g.ToOperand(destination);
    __ Movsd(xmm0, src);
    __ Movsd(src, dst);
    __ Movsd(dst, xmm0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
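  // Each entry is an 8-byte absolute label address, matching the times_8
  // scaled jmp in AssembleArchTableSwitch.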
  for (size_t index = 0; index < target_count; ++index) {
    __ dq(targets[index]);
  }
}


void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }


void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    __ Nop(padding_size);
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8