// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/frames.h"
#include "src/ia32/assembler-ia32.h"
#include "src/ia32/frames-ia32.h"
#include "src/ia32/macro-assembler-ia32.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->

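// xmm0 is used as the fixed scratch double register (kScratchDoubleReg) by the
// code below, which clobbers it freely.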
#define kScratchDoubleReg xmm0


// Adds IA-32 specific methods for decoding operands.
class IA32OperandConverter : public InstructionOperandConverter {
 public:
  IA32OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  Operand InputOperand(size_t index, int extra = 0) {
    return ToOperand(instr_->InputAt(index), extra);
  }

  Immediate InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand OutputOperand() { return ToOperand(instr_->Output()); }

  Operand ToOperand(InstructionOperand* op, int extra = 0) {
    if (op->IsRegister()) {
      DCHECK(extra == 0);
      return Operand(ToRegister(op));
    } else if (op->IsDoubleRegister()) {
      DCHECK(extra == 0);
      return Operand(ToDoubleRegister(op));
    }
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    return Operand(offset.from_stack_pointer() ? esp : ebp,
                   offset.offset() + extra);
  }

  Operand ToMaterializableOperand(int materializable_offset) {
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        FPOffsetToFrameSlot(materializable_offset));
    return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset());
  }

  Operand HighOperand(InstructionOperand* op) {
    DCHECK(op->IsDoubleStackSlot());
    return ToOperand(op, kPointerSize);
  }

  Immediate ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        return Immediate(constant.ToInt32());
      case Constant::kFloat32:
        return Immediate(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Immediate(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Immediate(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Immediate(constant.ToHeapObject());
      case Constant::kInt64:
        break;
      case Constant::kRpoNumber:
        return Immediate::CodeRelativeOffset(ToLabel(operand));
    }
    UNREACHABLE();
    return Immediate(-1);
  }

  static size_t NextOffset(size_t* offset) {
    size_t i = *offset;
    (*offset)++;
    return i;
  }

  static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
    STATIC_ASSERT(0 == static_cast<int>(times_1));
    STATIC_ASSERT(1 == static_cast<int>(times_2));
    STATIC_ASSERT(2 == static_cast<int>(times_4));
    STATIC_ASSERT(3 == static_cast<int>(times_8));
    int scale = static_cast<int>(mode - one);
    DCHECK(scale >= 0 && scale < 4);
    return static_cast<ScaleFactor>(scale);
  }

  Operand MemoryOperand(size_t* offset) {
    AddressingMode mode = AddressingModeField::decode(instr_->opcode());
    switch (mode) {
      case kMode_MR: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_MRI: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, disp);
      }
      case kMode_MR1:
      case kMode_MR2:
      case kMode_MR4:
      case kMode_MR8: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1, mode);
        int32_t disp = 0;
        return Operand(base, index, scale, disp);
      }
      case kMode_MR1I:
      case kMode_MR2I:
      case kMode_MR4I:
      case kMode_MR8I: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(base, index, scale, disp);
      }
      case kMode_M1:
      case kMode_M2:
      case kMode_M4:
      case kMode_M8: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1, mode);
        int32_t disp = 0;
        return Operand(index, scale, disp);
      }
      case kMode_M1I:
      case kMode_M2I:
      case kMode_M4I:
      case kMode_M8I: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1I, mode);
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(index, scale, disp);
      }
      case kMode_MI: {
        int32_t disp = InputInt32(NextOffset(offset));
        return Operand(Immediate(disp));
      }
      case kMode_None:
        UNREACHABLE();
        return Operand(no_reg, 0);
    }
    UNREACHABLE();
    return Operand(no_reg, 0);
  }

  Operand MemoryOperand(size_t first_input = 0) {
    return MemoryOperand(&first_input);
  }
};


namespace {

bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}


class OutOfLineLoadInteger final : public OutOfLineCode {
 public:
  OutOfLineLoadInteger(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ xor_(result_, result_); }

 private:
  Register const result_;
};


class OutOfLineLoadFloat final : public OutOfLineCode {
 public:
  OutOfLineLoadFloat(CodeGenerator* gen, XMMRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ pcmpeqd(result_, result_); }

 private:
  XMMRegister const result_;
};


class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
 public:
  OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
                             XMMRegister input)
      : OutOfLineCode(gen), result_(result), input_(input) {}

  void Generate() final {
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(MemOperand(esp, 0), input_);
    __ SlowTruncateToI(result_, esp, 0);
    __ add(esp, Immediate(kDoubleSize));
  }

 private:
  Register const result_;
  XMMRegister const input_;
};

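// Deferred code for the write barrier: kArchStoreWithWriteBarrier only jumps
// here when the stored value may need to be recorded, and this code filters
// out Smis and uninteresting pages before calling the RecordWriteStub.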
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        operand_(operand),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, zero,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ lea(scratch1_, operand_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Operand const operand_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

}  // namespace

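// The ASSEMBLE_CHECKED_* macros emit bounds-checked memory accesses: the
// offset in input 0 is compared against the length in input 1, out-of-bounds
// loads produce a default value (zero for integers, NaN for floats) via
// out-of-line code, and out-of-bounds stores are simply skipped.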
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr)                          \
  do {                                                                  \
    auto result = i.OutputDoubleRegister();                             \
    auto offset = i.InputRegister(0);                                   \
    if (instr->InputAt(1)->IsRegister()) {                              \
      __ cmp(offset, i.InputRegister(1));                               \
    } else {                                                            \
      __ cmp(offset, i.InputImmediate(1));                              \
    }                                                                   \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadFloat(this, result); \
    __ j(above_equal, ool->entry());                                    \
    __ asm_instr(result, i.MemoryOperand(2));                           \
    __ bind(ool->exit());                                               \
  } while (false)


#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                          \
  do {                                                                    \
    auto result = i.OutputRegister();                                     \
    auto offset = i.InputRegister(0);                                     \
    if (instr->InputAt(1)->IsRegister()) {                                \
      __ cmp(offset, i.InputRegister(1));                                 \
    } else {                                                              \
      __ cmp(offset, i.InputImmediate(1));                                \
    }                                                                     \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadInteger(this, result); \
    __ j(above_equal, ool->entry());                                      \
    __ asm_instr(result, i.MemoryOperand(2));                             \
    __ bind(ool->exit());                                                 \
  } while (false)


#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr)                 \
  do {                                                          \
    auto offset = i.InputRegister(0);                           \
    if (instr->InputAt(1)->IsRegister()) {                      \
      __ cmp(offset, i.InputRegister(1));                       \
    } else {                                                    \
      __ cmp(offset, i.InputImmediate(1));                      \
    }                                                           \
    Label done;                                                 \
    __ j(above_equal, &done, Label::kNear);                     \
    __ asm_instr(i.MemoryOperand(3), i.InputDoubleRegister(2)); \
    __ bind(&done);                                             \
  } while (false)


#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)            \
  do {                                                       \
    auto offset = i.InputRegister(0);                        \
    if (instr->InputAt(1)->IsRegister()) {                   \
      __ cmp(offset, i.InputRegister(1));                    \
    } else {                                                 \
      __ cmp(offset, i.InputImmediate(1));                   \
    }                                                        \
    Label done;                                              \
    __ j(above_equal, &done, Label::kNear);                  \
    if (instr->InputAt(2)->IsRegister()) {                   \
      __ asm_instr(i.MemoryOperand(3), i.InputRegister(2));  \
    } else {                                                 \
      __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \
    }                                                        \
    __ bind(&done);                                          \
  } while (false)

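// Releases any stack slots that are no longer needed before a tail call
// (a positive slot delta) and restores the default frame access mode.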
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ add(esp, Immediate(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ sub(esp, Immediate(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame()->needs_frame()) {
    __ mov(ebp, MemOperand(ebp, 0));
  }
  frame_access_state()->SetFrameAccessToSP();
}


// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  IA32OperandConverter i(this, instr);

  switch (ArchOpcodeField::decode(instr->opcode())) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ call(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ call(reg);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (HasImmediateInput(instr, 0)) {
        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
        __ jmp(code, RelocInfo::CODE_TARGET);
      } else {
        Register reg = i.InputRegister(0);
        __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
        __ jmp(reg);
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      __ call(FieldOperand(func, JSFunction::kCodeEntryOffset));
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
        __ Assert(equal, kWrongFunctionContext);
      }
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, i.TempRegister(0));
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (HasImmediateInput(instr, 0)) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ mov(i.OutputRegister(), esp);
      break;
    case kArchFramePointer:
      __ mov(i.OutputRegister(), ebp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->frame()->needs_frame()) {
        __ mov(i.OutputRegister(), Operand(ebp, 0));
      } else {
        __ mov(i.OutputRegister(), ebp);
      }
      break;
    case kArchTruncateDoubleToI: {
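      // cvttsd2si yields 0x80000000 when the input cannot be represented as an
      // int32; "cmp result, 1" overflows only for that value, so the overflow
      // branch dispatches to the out-of-line slow-path truncation.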
      auto result = i.OutputRegister();
      auto input = i.InputDoubleRegister(0);
      auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input);
      __ cvttsd2si(result, Operand(input));
      __ cmp(result, 1);
      __ j(overflow, ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      Register value = i.InputRegister(index);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
                                                   scratch0, scratch1, mode);
      __ mov(operand, value);
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask,
                       not_zero, ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      Register base;
      if (offset.from_stack_pointer()) {
        base = esp;
      } else {
        base = ebp;
      }
      __ lea(i.OutputRegister(), Operand(base, offset.offset()));
      break;
    }
    case kIA32Add:
      if (HasImmediateInput(instr, 1)) {
        __ add(i.InputOperand(0), i.InputImmediate(1));
      } else {
        __ add(i.InputRegister(0), i.InputOperand(1));
      }
      break;
    case kIA32And:
      if (HasImmediateInput(instr, 1)) {
        __ and_(i.InputOperand(0), i.InputImmediate(1));
      } else {
        __ and_(i.InputRegister(0), i.InputOperand(1));
      }
      break;
    case kIA32Cmp:
      if (AddressingModeField::decode(instr->opcode()) != kMode_None) {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        if (HasImmediateInput(instr, index)) {
          __ cmp(operand, i.InputImmediate(index));
        } else {
          __ cmp(operand, i.InputRegister(index));
        }
      } else {
        if (HasImmediateInput(instr, 1)) {
          __ cmp(i.InputOperand(0), i.InputImmediate(1));
        } else {
          __ cmp(i.InputRegister(0), i.InputOperand(1));
        }
      }
      break;
    case kIA32Test:
      if (AddressingModeField::decode(instr->opcode()) != kMode_None) {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        if (HasImmediateInput(instr, index)) {
          __ test(operand, i.InputImmediate(index));
        } else {
          __ test(i.InputRegister(index), operand);
        }
      } else {
        if (HasImmediateInput(instr, 1)) {
          __ test(i.InputOperand(0), i.InputImmediate(1));
        } else {
          __ test(i.InputRegister(0), i.InputOperand(1));
        }
      }
      break;
    case kIA32Imul:
      if (HasImmediateInput(instr, 1)) {
        __ imul(i.OutputRegister(), i.InputOperand(0), i.InputInt32(1));
      } else {
        __ imul(i.OutputRegister(), i.InputOperand(1));
      }
      break;
    case kIA32ImulHigh:
      __ imul(i.InputRegister(1));
      break;
    case kIA32UmulHigh:
      __ mul(i.InputRegister(1));
      break;
    case kIA32Idiv:
      __ cdq();
      __ idiv(i.InputOperand(1));
      break;
    case kIA32Udiv:
      __ Move(edx, Immediate(0));
      __ div(i.InputOperand(1));
      break;
    case kIA32Not:
      __ not_(i.OutputOperand());
      break;
    case kIA32Neg:
      __ neg(i.OutputOperand());
      break;
    case kIA32Or:
      if (HasImmediateInput(instr, 1)) {
        __ or_(i.InputOperand(0), i.InputImmediate(1));
      } else {
        __ or_(i.InputRegister(0), i.InputOperand(1));
      }
      break;
    case kIA32Xor:
      if (HasImmediateInput(instr, 1)) {
        __ xor_(i.InputOperand(0), i.InputImmediate(1));
      } else {
        __ xor_(i.InputRegister(0), i.InputOperand(1));
      }
      break;
    case kIA32Sub:
      if (HasImmediateInput(instr, 1)) {
        __ sub(i.InputOperand(0), i.InputImmediate(1));
      } else {
        __ sub(i.InputRegister(0), i.InputOperand(1));
      }
      break;
    case kIA32Shl:
      if (HasImmediateInput(instr, 1)) {
        __ shl(i.OutputOperand(), i.InputInt5(1));
      } else {
        __ shl_cl(i.OutputOperand());
      }
      break;
    case kIA32Shr:
      if (HasImmediateInput(instr, 1)) {
        __ shr(i.OutputOperand(), i.InputInt5(1));
      } else {
        __ shr_cl(i.OutputOperand());
      }
      break;
    case kIA32Sar:
      if (HasImmediateInput(instr, 1)) {
        __ sar(i.OutputOperand(), i.InputInt5(1));
      } else {
        __ sar_cl(i.OutputOperand());
      }
      break;
    case kIA32Ror:
      if (HasImmediateInput(instr, 1)) {
        __ ror(i.OutputOperand(), i.InputInt5(1));
      } else {
        __ ror_cl(i.OutputOperand());
      }
      break;
    case kIA32Lzcnt:
      __ Lzcnt(i.OutputRegister(), i.InputOperand(0));
      break;
    case kIA32Tzcnt:
      __ Tzcnt(i.OutputRegister(), i.InputOperand(0));
      break;
    case kIA32Popcnt:
      __ Popcnt(i.OutputRegister(), i.InputOperand(0));
      break;
    case kSSEFloat32Cmp:
      __ ucomiss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Add:
      __ addss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Sub:
      __ subss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Mul:
      __ mulss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Div:
      __ divss(i.InputDoubleRegister(0), i.InputOperand(1));
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulss depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat32Max:
      __ maxss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Min:
      __ minss(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat32Sqrt:
      __ sqrtss(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEFloat32Abs: {
      // TODO(bmeurer): Use 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 33);
      __ andps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Neg: {
      // TODO(bmeurer): Use 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 31);
      __ xorps(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat32Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ roundss(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat64Cmp:
      __ ucomisd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Add:
      __ addsd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Sub:
      __ subsd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Mul:
      __ mulsd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Div:
      __ divsd(i.InputDoubleRegister(0), i.InputOperand(1));
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulsd depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    case kSSEFloat64Max:
      __ maxsd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Min:
      __ minsd(i.InputDoubleRegister(0), i.InputOperand(1));
      break;
    case kSSEFloat64Mod: {
      // TODO(dcarney): alignment is wrong.
      __ sub(esp, Immediate(kDoubleSize));
      // Move values to st(0) and st(1).
      __ movsd(Operand(esp, 0), i.InputDoubleRegister(1));
      __ fld_d(Operand(esp, 0));
      __ movsd(Operand(esp, 0), i.InputDoubleRegister(0));
      __ fld_d(Operand(esp, 0));
      // Loop while fprem isn't done.
      Label mod_loop;
      __ bind(&mod_loop);
      // This instruction traps on all kinds of inputs, but we are assuming
      // the floating point control word is set to ignore them all.
      __ fprem();
      // The following 2 instructions implicitly use eax.
      __ fnstsw_ax();
      __ sahf();
      __ j(parity_even, &mod_loop);
      // Move output to stack and clean up.
      __ fstp(1);
      __ fstp_d(Operand(esp, 0));
      __ movsd(i.OutputDoubleRegister(), Operand(esp, 0));
      __ add(esp, Immediate(kDoubleSize));
      break;
    }
    case kSSEFloat64Abs: {
      // TODO(bmeurer): Use 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 1);
      __ andpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Neg: {
      // TODO(bmeurer): Use 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 63);
      __ xorpd(i.OutputDoubleRegister(), kScratchDoubleReg);
      break;
    }
    case kSSEFloat64Sqrt:
      __ sqrtsd(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEFloat64Round: {
      CpuFeatureScope sse_scope(masm(), SSE4_1);
      RoundingMode const mode =
          static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
      __ roundsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0), mode);
      break;
    }
    case kSSEFloat32ToFloat64:
      __ cvtss2sd(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEFloat64ToFloat32:
      __ cvtsd2ss(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEFloat32ToInt32:
      __ cvttss2si(i.OutputRegister(), i.InputOperand(0));
      break;
    case kSSEFloat32ToUint32: {
      Label success;
      __ cvttss2si(i.OutputRegister(), i.InputOperand(0));
      __ test(i.OutputRegister(), i.OutputRegister());
      __ j(positive, &success);
      __ Move(kScratchDoubleReg, static_cast<float>(INT32_MIN));
      __ addss(kScratchDoubleReg, i.InputOperand(0));
      __ cvttss2si(i.OutputRegister(), kScratchDoubleReg);
      __ or_(i.OutputRegister(), Immediate(0x80000000));
      __ bind(&success);
      break;
    }
    case kSSEFloat64ToInt32:
      __ cvttsd2si(i.OutputRegister(), i.InputOperand(0));
      break;
    case kSSEFloat64ToUint32: {
      __ Move(kScratchDoubleReg, -2147483648.0);
      __ addsd(kScratchDoubleReg, i.InputOperand(0));
      __ cvttsd2si(i.OutputRegister(), kScratchDoubleReg);
      __ add(i.OutputRegister(), Immediate(0x80000000));
      break;
    }
    case kSSEInt32ToFloat32:
      __ cvtsi2ss(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEUint32ToFloat32: {
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      __ mov(scratch0, i.InputOperand(0));
      __ Cvtui2ss(i.OutputDoubleRegister(), scratch0, scratch1);
      break;
    }
    case kSSEInt32ToFloat64:
      __ cvtsi2sd(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEUint32ToFloat64:
      __ LoadUint32(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kSSEFloat64ExtractLowWord32:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ mov(i.OutputRegister(), i.InputOperand(0));
      } else {
        __ movd(i.OutputRegister(), i.InputDoubleRegister(0));
      }
      break;
    case kSSEFloat64ExtractHighWord32:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ mov(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
      } else {
        __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1);
      }
      break;
    case kSSEFloat64InsertLowWord32:
      __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0);
      break;
    case kSSEFloat64InsertHighWord32:
      __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1);
      break;
    case kSSEFloat64LoadLowWord32:
      __ movd(i.OutputDoubleRegister(), i.InputOperand(0));
      break;
    case kAVXFloat32Add: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vaddss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat32Sub: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vsubss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat32Mul: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vmulss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat32Div: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vdivss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulss depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    }
    case kAVXFloat32Max: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vmaxss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat32Min: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vminss(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat64Add: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vaddsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat64Sub: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vsubsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat64Mul: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vmulsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat64Div: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vdivsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a (v)mulsd depending on the result.
      __ movaps(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    }
    case kAVXFloat64Max: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vmaxsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat64Min: {
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vminsd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputOperand(1));
      break;
    }
    case kAVXFloat32Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 33);
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vandps(i.OutputDoubleRegister(), kScratchDoubleReg, i.InputOperand(0));
      break;
    }
    case kAVXFloat32Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 31);
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vxorps(i.OutputDoubleRegister(), kScratchDoubleReg, i.InputOperand(0));
      break;
    }
    case kAVXFloat64Abs: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psrlq(kScratchDoubleReg, 1);
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vandpd(i.OutputDoubleRegister(), kScratchDoubleReg, i.InputOperand(0));
      break;
    }
    case kAVXFloat64Neg: {
      // TODO(bmeurer): Use RIP relative 128-bit constants.
      __ pcmpeqd(kScratchDoubleReg, kScratchDoubleReg);
      __ psllq(kScratchDoubleReg, 63);
      CpuFeatureScope avx_scope(masm(), AVX);
      __ vxorpd(i.OutputDoubleRegister(), kScratchDoubleReg, i.InputOperand(0));
      break;
    }
    case kIA32Movsxbl:
      __ movsx_b(i.OutputRegister(), i.MemoryOperand());
      break;
    case kIA32Movzxbl:
      __ movzx_b(i.OutputRegister(), i.MemoryOperand());
      break;
    case kIA32Movb: {
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      if (HasImmediateInput(instr, index)) {
        __ mov_b(operand, i.InputInt8(index));
      } else {
        __ mov_b(operand, i.InputRegister(index));
      }
      break;
    }
    case kIA32Movsxwl:
      __ movsx_w(i.OutputRegister(), i.MemoryOperand());
      break;
    case kIA32Movzxwl:
      __ movzx_w(i.OutputRegister(), i.MemoryOperand());
      break;
    case kIA32Movw: {
      size_t index = 0;
      Operand operand = i.MemoryOperand(&index);
      if (HasImmediateInput(instr, index)) {
        __ mov_w(operand, i.InputInt16(index));
      } else {
        __ mov_w(operand, i.InputRegister(index));
      }
      break;
    }
    case kIA32Movl:
      if (instr->HasOutput()) {
        __ mov(i.OutputRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        if (HasImmediateInput(instr, index)) {
          __ mov(operand, i.InputImmediate(index));
        } else {
          __ mov(operand, i.InputRegister(index));
        }
      }
      break;
    case kIA32Movsd:
      if (instr->HasOutput()) {
        __ movsd(i.OutputDoubleRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        __ movsd(operand, i.InputDoubleRegister(index));
      }
      break;
    case kIA32Movss:
      if (instr->HasOutput()) {
        __ movss(i.OutputDoubleRegister(), i.MemoryOperand());
      } else {
        size_t index = 0;
        Operand operand = i.MemoryOperand(&index);
        __ movss(operand, i.InputDoubleRegister(index));
      }
      break;
    case kIA32BitcastFI:
      if (instr->InputAt(0)->IsDoubleStackSlot()) {
        __ mov(i.OutputRegister(), i.InputOperand(0));
      } else {
        __ movd(i.OutputRegister(), i.InputDoubleRegister(0));
      }
      break;
    case kIA32BitcastIF:
      if (instr->InputAt(0)->IsRegister()) {
        __ movd(i.OutputDoubleRegister(), i.InputRegister(0));
      } else {
        __ movss(i.OutputDoubleRegister(), i.InputOperand(0));
      }
      break;
    case kIA32Lea: {
      AddressingMode mode = AddressingModeField::decode(instr->opcode());
      // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
      // and addressing mode just happens to work out. The "addl"/"subl" forms
      // in these cases are faster based on measurements.
      if (mode == kMode_MI) {
        __ Move(i.OutputRegister(), Immediate(i.InputInt32(0)));
      } else if (i.InputRegister(0).is(i.OutputRegister())) {
        if (mode == kMode_MRI) {
          int32_t constant_summand = i.InputInt32(1);
          if (constant_summand > 0) {
            __ add(i.OutputRegister(), Immediate(constant_summand));
          } else if (constant_summand < 0) {
            __ sub(i.OutputRegister(), Immediate(-constant_summand));
          }
        } else if (mode == kMode_MR1) {
          if (i.InputRegister(1).is(i.OutputRegister())) {
            __ shl(i.OutputRegister(), 1);
          } else {
            __ lea(i.OutputRegister(), i.MemoryOperand());
          }
        } else if (mode == kMode_M2) {
          __ shl(i.OutputRegister(), 1);
        } else if (mode == kMode_M4) {
          __ shl(i.OutputRegister(), 2);
        } else if (mode == kMode_M8) {
          __ shl(i.OutputRegister(), 3);
        } else {
          __ lea(i.OutputRegister(), i.MemoryOperand());
        }
      } else {
        __ lea(i.OutputRegister(), i.MemoryOperand());
      }
      break;
    }
    case kIA32PushFloat32:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ sub(esp, Immediate(kDoubleSize));
        __ movss(Operand(esp, 0), i.InputDoubleRegister(0));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else if (HasImmediateInput(instr, 0)) {
        __ Move(kScratchDoubleReg, i.InputDouble(0));
        __ sub(esp, Immediate(kDoubleSize));
        __ movss(Operand(esp, 0), kScratchDoubleReg);
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ movsd(kScratchDoubleReg, i.InputOperand(0));
        __ sub(esp, Immediate(kDoubleSize));
        __ movss(Operand(esp, 0), kScratchDoubleReg);
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      }
      break;
    case kIA32PushFloat64:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ sub(esp, Immediate(kDoubleSize));
        __ movsd(Operand(esp, 0), i.InputDoubleRegister(0));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else if (HasImmediateInput(instr, 0)) {
        __ Move(kScratchDoubleReg, i.InputDouble(0));
        __ sub(esp, Immediate(kDoubleSize));
        __ movsd(Operand(esp, 0), kScratchDoubleReg);
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ movsd(kScratchDoubleReg, i.InputOperand(0));
        __ sub(esp, Immediate(kDoubleSize));
        __ movsd(Operand(esp, 0), kScratchDoubleReg);
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      }
      break;
    case kIA32Push:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ sub(esp, Immediate(kDoubleSize));
        __ movsd(Operand(esp, 0), i.InputDoubleRegister(0));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else if (HasImmediateInput(instr, 0)) {
        __ push(i.InputImmediate(0));
        frame_access_state()->IncreaseSPDelta(1);
      } else {
        __ push(i.InputOperand(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      break;
    case kIA32Poke: {
      int const slot = MiscField::decode(instr->opcode());
      if (HasImmediateInput(instr, 0)) {
        __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0));
      } else {
        __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0));
      }
      break;
    }
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_b);
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_b);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_w);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_w);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(mov);
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(movss);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(movsd);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(mov_b);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(mov_w);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(mov);
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT(movss);
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_FLOAT(movsd);
      break;
    case kIA32StackCheck: {
      ExternalReference const stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      break;
    }
    case kCheckedLoadWord64:
    case kCheckedStoreWord64:
      UNREACHABLE();  // currently unsupported checked int64 load/store.
      break;
  }
}  // NOLINT(readability/fn_size)


// Assembles a branch after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  IA32OperandConverter i(this, instr);
  Label::Distance flabel_distance =
      branch->fallthru ? Label::kNear : Label::kFar;
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  switch (branch->condition) {
    case kUnorderedEqual:
      __ j(parity_even, flabel, flabel_distance);
    // Fall through.
    case kEqual:
      __ j(equal, tlabel);
      break;
    case kUnorderedNotEqual:
      __ j(parity_even, tlabel);
    // Fall through.
    case kNotEqual:
      __ j(not_equal, tlabel);
      break;
    case kSignedLessThan:
      __ j(less, tlabel);
      break;
    case kSignedGreaterThanOrEqual:
      __ j(greater_equal, tlabel);
      break;
    case kSignedLessThanOrEqual:
      __ j(less_equal, tlabel);
      break;
    case kSignedGreaterThan:
      __ j(greater, tlabel);
      break;
    case kUnsignedLessThan:
      __ j(below, tlabel);
      break;
    case kUnsignedGreaterThanOrEqual:
      __ j(above_equal, tlabel);
      break;
    case kUnsignedLessThanOrEqual:
      __ j(below_equal, tlabel);
      break;
    case kUnsignedGreaterThan:
      __ j(above, tlabel);
      break;
    case kOverflow:
      __ j(overflow, tlabel);
      break;
    case kNotOverflow:
      __ j(no_overflow, tlabel);
      break;
    default:
      UNREACHABLE();
      break;
  }
  // Add a jump if not falling through to the next block.
  if (!branch->fallthru) __ jmp(flabel);
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  IA32OperandConverter i(this, instr);
  Label done;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label check;
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = no_condition;
  switch (condition) {
    case kUnorderedEqual:
      __ j(parity_odd, &check, Label::kNear);
      __ Move(reg, Immediate(0));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kEqual:
      cc = equal;
      break;
    case kUnorderedNotEqual:
      __ j(parity_odd, &check, Label::kNear);
      __ mov(reg, Immediate(1));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kNotEqual:
      cc = not_equal;
      break;
    case kSignedLessThan:
      cc = less;
      break;
    case kSignedGreaterThanOrEqual:
      cc = greater_equal;
      break;
    case kSignedLessThanOrEqual:
      cc = less_equal;
      break;
    case kSignedGreaterThan:
      cc = greater;
      break;
    case kUnsignedLessThan:
      cc = below;
      break;
    case kUnsignedGreaterThanOrEqual:
      cc = above_equal;
      break;
    case kUnsignedLessThanOrEqual:
      cc = below_equal;
      break;
    case kUnsignedGreaterThan:
      cc = above;
      break;
    case kOverflow:
      cc = overflow;
      break;
    case kNotOverflow:
      cc = no_overflow;
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ bind(&check);
  if (reg.is_byte_register()) {
    // setcc for byte registers (al, bl, cl, dl).
    __ setcc(cc, reg);
    __ movzx_b(reg, reg);
  } else {
    // Emit a branch to set a register to either 1 or 0.
    Label set;
    __ j(cc, &set, Label::kNear);
    __ Move(reg, Immediate(0));
    __ jmp(&done, Label::kNear);
    __ bind(&set);
    __ mov(reg, Immediate(1));
  }
  __ bind(&done);
}


void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  IA32OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ cmp(input, Immediate(i.InputInt32(index + 0)));
    __ j(equal, GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}

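// Emits a table switch: the input is bounds-checked against the number of
// cases, out-of-range values jump to the default block (input 1), and in-range
// values jump indirectly through an embedded jump table.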
1326void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1327 IA32OperandConverter i(this, instr);
1328 Register input = i.InputRegister(0);
1329 size_t const case_count = instr->InputCount() - 2;
1330 Label** cases = zone()->NewArray<Label*>(case_count);
1331 for (size_t index = 0; index < case_count; ++index) {
1332 cases[index] = GetLabel(i.InputRpo(index + 2));
1333 }
1334 Label* const table = AddJumpTable(cases, case_count);
1335 __ cmp(input, Immediate(case_count));
1336 __ j(above_equal, GetLabel(i.InputRpo(1)));
1337 __ jmp(Operand::JumpTable(input, times_4, table));
1338}
1339
1340
1341void CodeGenerator::AssembleDeoptimizerCall(
1342 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001343 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001344 isolate(), deoptimization_id, bailout_type);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001345 __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1346}
1347
1348
1349// The calling convention for JSFunctions on IA32 passes arguments on the
1350// stack and the JSFunction and context in EDI and ESI, respectively, thus
1351// the steps of the call look as follows:
1352
1353// --{ before the call instruction }--------------------------------------------
1354// | caller frame |
1355// ^ esp ^ ebp
1356
1357// --{ push arguments and setup ESI, EDI }--------------------------------------
1358// | args + receiver | caller frame |
1359// ^ esp ^ ebp
1360// [edi = JSFunction, esi = context]
1361
1362// --{ call [edi + kCodeEntryOffset] }------------------------------------------
1363// | RET | args + receiver | caller frame |
1364// ^ esp ^ ebp
1365
1366// =={ prologue of called function }============================================
1367// --{ push ebp }---------------------------------------------------------------
1368// | FP | RET | args + receiver | caller frame |
1369// ^ esp ^ ebp
1370
1371// --{ mov ebp, esp }-----------------------------------------------------------
1372// | FP | RET | args + receiver | caller frame |
1373// ^ ebp,esp
1374
1375// --{ push esi }---------------------------------------------------------------
1376// | CTX | FP | RET | args + receiver | caller frame |
1377// ^esp ^ ebp
1378
1379// --{ push edi }---------------------------------------------------------------
1380// | FNC | CTX | FP | RET | args + receiver | caller frame |
1381// ^esp ^ ebp
1382
1383// --{ subi esp, #N }-----------------------------------------------------------
1384// | callee frame | FNC | CTX | FP | RET | args + receiver | caller frame |
1385// ^esp ^ ebp
1386
1387// =={ body of called function }================================================
1388
1389// =={ epilogue of called function }============================================
1390// --{ mov esp, ebp }-----------------------------------------------------------
1391// | FP | RET | args + receiver | caller frame |
1392// ^ esp,ebp
1393
1394// --{ pop ebp }-----------------------------------------------------------
1395// | | RET | args + receiver | caller frame |
1396// ^ esp ^ ebp
1397
1398// --{ ret #A+1 }-----------------------------------------------------------
1399// | | caller frame |
1400// ^ esp ^ ebp
1401
1402
1403// Runtime function calls are accomplished by doing a stub call to the
1404// CEntryStub (a real code object). On IA32 passes arguments on the
1405// stack, the number of arguments in EAX, the address of the runtime function
1406// in EBX, and the context in ESI.
1407
1408// --{ before the call instruction }--------------------------------------------
1409// | caller frame |
1410// ^ esp ^ ebp
1411
1412// --{ push arguments and setup EAX, EBX, and ESI }-----------------------------
1413// | args + receiver | caller frame |
1414// ^ esp ^ ebp
1415// [eax = #args, ebx = runtime function, esi = context]
1416
1417// --{ call #CEntryStub }-------------------------------------------------------
1418// | RET | args + receiver | caller frame |
1419// ^ esp ^ ebp
1420
1421// =={ body of runtime function }===============================================
1422
1423// --{ runtime returns }--------------------------------------------------------
1424// | caller frame |
1425// ^ esp ^ ebp
1426
1427// Other custom linkages (e.g. for calling directly into and out of C++) may
1428// need to save callee-saved registers on the stack, which is done in the
1429// function prologue of generated code.
1430
1431// --{ before the call instruction }--------------------------------------------
1432// | caller frame |
1433// ^ esp ^ ebp
1434
1435// --{ set up arguments in registers on stack }---------------------------------
1436// | args | caller frame |
1437// ^ esp ^ ebp
1438// [r0 = arg0, r1 = arg1, ...]
1439
1440// --{ call code }--------------------------------------------------------------
1441// | RET | args | caller frame |
1442// ^ esp ^ ebp
1443
1444// =={ prologue of called function }============================================
1445// --{ push ebp }---------------------------------------------------------------
1446// | FP | RET | args | caller frame |
1447// ^ esp ^ ebp
1448
1449// --{ mov ebp, esp }-----------------------------------------------------------
1450// | FP | RET | args | caller frame |
1451// ^ ebp,esp
1452
1453// --{ save registers }---------------------------------------------------------
1454// | regs | FP | RET | args | caller frame |
1455// ^ esp ^ ebp
1456
1457// --{ subi esp, #N }-----------------------------------------------------------
1458// | callee frame | regs | FP | RET | args | caller frame |
1459// ^esp ^ ebp
1460
1461// =={ body of called function }================================================
1462
1463// =={ epilogue of called function }============================================
1464// --{ restore registers }------------------------------------------------------
1465// | regs | FP | RET | args | caller frame |
1466// ^ esp ^ ebp
1467
1468// --{ mov esp, ebp }-----------------------------------------------------------
1469// | FP | RET | args | caller frame |
1470// ^ esp,ebp
1471
1472// --{ pop ebp }----------------------------------------------------------------
1473// | RET | args | caller frame |
1474// ^ esp ^ ebp
1475
1476
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall()) {
    // Assemble a prologue similar to the cdecl calling convention.
    __ push(ebp);
    __ mov(ebp, esp);
  } else if (descriptor->IsJSFunctionCall()) {
    // TODO(turbofan): this prologue is redundant with OSR, but still needed
    // for code aging.
    __ Prologue(this->info()->GeneratePreagedPrologue());
  } else if (frame()->needs_frame()) {
    __ StubPrologue();
  } else {
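    // The frame is elided: only the return address pushed by the call
    // occupies space on the stack.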
    frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize);
  }
  frame_access_state()->SetFrameAccessToDefault();

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the
    // unoptimized frame is still on the stack. Optimized code uses OSR values
    // directly from the unoptimized frame. Thus, all that needs to be done is
    // to allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (stack_shrink_slots > 0) {
    __ sub(esp, Immediate(stack_shrink_slots * kPointerSize));
  }

  if (saves != 0) {  // Save callee-saved registers.
    DCHECK(!info()->is_osr());
    int pushed = 0;
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      __ push(Register::from_code(i));
      ++pushed;
    }
    frame()->AllocateSavedCalleeRegisterSlots(pushed);
  }
}


void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  const RegList saves = descriptor->CalleeSavedRegisters();
  // Restore registers.
  if (saves != 0) {
    for (int i = 0; i < Register::kNumRegisters; i++) {
      if (!((1 << i) & saves)) continue;
      __ pop(Register::from_code(i));
    }
  }

  if (descriptor->IsCFunctionCall()) {
    __ mov(esp, ebp);  // Move stack pointer back to frame pointer.
    __ pop(ebp);       // Pop caller's frame pointer.
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ jmp(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      __ mov(esp, ebp);  // Move stack pointer back to frame pointer.
      __ pop(ebp);       // Pop caller's frame pointer.
    }
  }
  size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
  // Might need ecx for scratch if pop_size is too big.
  DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit());
  __ Ret(static_cast<int>(pop_size), ecx);
}


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  IA32OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    Operand dst = g.ToOperand(destination);
    __ mov(dst, src);
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mov(dst, src);
    } else {
      Operand dst = g.ToOperand(destination);
      __ push(src);
      __ pop(dst);
    }
  } else if (source->IsConstant()) {
    Constant src_constant = g.ToConstant(source);
    if (src_constant.type() == Constant::kHeapObject) {
      Handle<HeapObject> src = src_constant.ToHeapObject();
      int offset;
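      // Heap constants that are already materialized in the current frame
      // (e.g. the context) can be reloaded from their frame slot instead of
      // being embedded in the instruction stream.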
      if (IsMaterializableFromFrame(src, &offset)) {
        if (destination->IsRegister()) {
          Register dst = g.ToRegister(destination);
          __ mov(dst, g.ToMaterializableOperand(offset));
        } else {
          DCHECK(destination->IsStackSlot());
          Operand dst = g.ToOperand(destination);
          __ push(g.ToMaterializableOperand(offset));
          __ pop(dst);
        }
      } else if (destination->IsRegister()) {
        Register dst = g.ToRegister(destination);
        __ LoadHeapObject(dst, src);
      } else {
        DCHECK(destination->IsStackSlot());
        Operand dst = g.ToOperand(destination);
        AllowDeferredHandleDereference embedding_raw_address;
        if (isolate()->heap()->InNewSpace(*src)) {
          __ PushHeapObject(src);
          __ pop(dst);
        } else {
          __ mov(dst, src);
        }
      }
    } else if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (destination->IsStackSlot()) {
      Operand dst = g.ToOperand(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (src_constant.type() == Constant::kFloat32) {
      // TODO(turbofan): Can we do better here?
      uint32_t src = bit_cast<uint32_t>(src_constant.ToFloat32());
      if (destination->IsDoubleRegister()) {
        XMMRegister dst = g.ToDoubleRegister(destination);
        __ Move(dst, src);
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        Operand dst = g.ToOperand(destination);
        __ Move(dst, Immediate(src));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src_constant.type());
      uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (destination->IsDoubleRegister()) {
        XMMRegister dst = g.ToDoubleRegister(destination);
        __ Move(dst, src);
      } else {
        DCHECK(destination->IsDoubleStackSlot());
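        // Spill the 64-bit constant to the stack slot as two 32-bit halves.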
        Operand dst0 = g.ToOperand(destination);
        Operand dst1 = g.HighOperand(destination);
        __ Move(dst0, Immediate(lower));
        __ Move(dst1, Immediate(upper));
      }
    }
  } else if (source->IsDoubleRegister()) {
    XMMRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ movaps(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      Operand dst = g.ToOperand(destination);
      __ movsd(dst, src);
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = g.ToDoubleRegister(destination);
      __ movsd(dst, src);
    } else {
      Operand dst = g.ToOperand(destination);
      __ movsd(kScratchDoubleReg, src);
      __ movsd(dst, kScratchDoubleReg);
    }
  } else {
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  IA32OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = g.ToRegister(source);
    Register dst = g.ToRegister(destination);
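    // Swap the two registers through the stack so that no scratch register
    // is needed.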
    __ push(src);
    __ mov(src, dst);
    __ pop(dst);
  } else if (source->IsRegister() && destination->IsStackSlot()) {
    // Register-memory.
    Register src = g.ToRegister(source);
    __ push(src);
    frame_access_state()->IncreaseSPDelta(1);
    Operand dst = g.ToOperand(destination);
    __ mov(src, dst);
    frame_access_state()->IncreaseSPDelta(-1);
    dst = g.ToOperand(destination);
    __ pop(dst);
  } else if (source->IsStackSlot() && destination->IsStackSlot()) {
    // Memory-memory.
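    // Swap the two stack slots through the stack itself; the operands are
    // re-resolved as the stack pointer moves (see the IncreaseSPDelta calls).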
    Operand dst1 = g.ToOperand(destination);
    __ push(dst1);
    frame_access_state()->IncreaseSPDelta(1);
    Operand src1 = g.ToOperand(source);
    __ push(src1);
    Operand dst2 = g.ToOperand(destination);
    __ pop(dst2);
    frame_access_state()->IncreaseSPDelta(-1);
    Operand src2 = g.ToOperand(source);
    __ pop(src2);
  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
    // XMM register-register swap.
    XMMRegister src = g.ToDoubleRegister(source);
    XMMRegister dst = g.ToDoubleRegister(destination);
    __ movaps(kScratchDoubleReg, src);
    __ movaps(src, dst);
    __ movaps(dst, kScratchDoubleReg);
  } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) {
    // XMM register-memory swap.
    XMMRegister reg = g.ToDoubleRegister(source);
    Operand other = g.ToOperand(destination);
    __ movsd(kScratchDoubleReg, other);
    __ movsd(other, reg);
    __ movaps(reg, kScratchDoubleReg);
  } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) {
    // Double-width memory-to-memory.
    Operand src0 = g.ToOperand(source);
    Operand src1 = g.HighOperand(source);
    Operand dst0 = g.ToOperand(destination);
    Operand dst1 = g.HighOperand(destination);
    __ movsd(kScratchDoubleReg, dst0);  // Save destination in scratch register.
    __ push(src0);  // Then use stack to copy source to destination.
    __ pop(dst0);
    __ push(src1);
    __ pop(dst1);
    __ movsd(src0, kScratchDoubleReg);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
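  // Emit one 32-bit entry per target; each entry holds the absolute address
  // of the corresponding label once the code is assembled.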
  for (size_t index = 0; index < target_count; ++index) {
    __ dd(targets[index]);
  }
}


void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }


void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    __ Nop(padding_size);
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8