1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/ast/scopes.h"
8#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
11#include "src/compiler/osr.h"
12#include "src/frames.h"
13#include "src/x87/assembler-x87.h"
14#include "src/x87/frames-x87.h"
15#include "src/x87/macro-assembler-x87.h"
16
17namespace v8 {
18namespace internal {
19namespace compiler {
20
21#define __ masm()->
22
23
24// Adds X87-specific methods for decoding operands.
25class X87OperandConverter : public InstructionOperandConverter {
26 public:
27 X87OperandConverter(CodeGenerator* gen, Instruction* instr)
28 : InstructionOperandConverter(gen, instr) {}
29
30 Operand InputOperand(size_t index, int extra = 0) {
31 return ToOperand(instr_->InputAt(index), extra);
32 }
33
34 Immediate InputImmediate(size_t index) {
35 return ToImmediate(instr_->InputAt(index));
36 }
37
38 Operand OutputOperand() { return ToOperand(instr_->Output()); }
39
40 Operand ToOperand(InstructionOperand* op, int extra = 0) {
41 if (op->IsRegister()) {
42 DCHECK(extra == 0);
43 return Operand(ToRegister(op));
44 }
45 DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
46 return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
47 }
48
49 Operand SlotToOperand(int slot, int extra = 0) {
50 FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
51 return Operand(offset.from_stack_pointer() ? esp : ebp,
52 offset.offset() + extra);
53 }
54
55 Operand HighOperand(InstructionOperand* op) {
56 DCHECK(op->IsFPStackSlot());
57 return ToOperand(op, kPointerSize);
58 }
59
60 Immediate ToImmediate(InstructionOperand* operand) {
61 Constant constant = ToConstant(operand);
62 if (constant.type() == Constant::kInt32 &&
63 (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
64 constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE)) {
65 return Immediate(reinterpret_cast<Address>(constant.ToInt32()),
66 constant.rmode());
67 }
68 switch (constant.type()) {
69 case Constant::kInt32:
70 return Immediate(constant.ToInt32());
71 case Constant::kFloat32:
72 return Immediate(
73 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
74 case Constant::kFloat64:
75 return Immediate(
76 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
77 case Constant::kExternalReference:
78 return Immediate(constant.ToExternalReference());
79 case Constant::kHeapObject:
80 return Immediate(constant.ToHeapObject());
81 case Constant::kInt64:
82 break;
83 case Constant::kRpoNumber:
84 return Immediate::CodeRelativeOffset(ToLabel(operand));
85 }
86 UNREACHABLE();
87 return Immediate(-1);
88 }
89
90 static size_t NextOffset(size_t* offset) {
91 size_t i = *offset;
92 (*offset)++;
93 return i;
94 }
95
96 static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
97 STATIC_ASSERT(0 == static_cast<int>(times_1));
98 STATIC_ASSERT(1 == static_cast<int>(times_2));
99 STATIC_ASSERT(2 == static_cast<int>(times_4));
100 STATIC_ASSERT(3 == static_cast<int>(times_8));
101 int scale = static_cast<int>(mode - one);
102 DCHECK(scale >= 0 && scale < 4);
103 return static_cast<ScaleFactor>(scale);
104 }
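// Editorial note (added, not in the original source): the addressing-mode
// enums are laid out so that the scale is the distance from the "times_1"
// variant. For example, ScaleFor(kMode_MR1, kMode_MR4) evaluates to 2, i.e.
// times_4, so a kMode_MR4 operand addresses base + index * 4.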
105
106 Operand MemoryOperand(size_t* offset) {
107 AddressingMode mode = AddressingModeField::decode(instr_->opcode());
108 switch (mode) {
109 case kMode_MR: {
110 Register base = InputRegister(NextOffset(offset));
111 int32_t disp = 0;
112 return Operand(base, disp);
113 }
114 case kMode_MRI: {
115 Register base = InputRegister(NextOffset(offset));
116 int32_t disp = InputInt32(NextOffset(offset));
117 return Operand(base, disp);
118 }
119 case kMode_MR1:
120 case kMode_MR2:
121 case kMode_MR4:
122 case kMode_MR8: {
123 Register base = InputRegister(NextOffset(offset));
124 Register index = InputRegister(NextOffset(offset));
125 ScaleFactor scale = ScaleFor(kMode_MR1, mode);
126 int32_t disp = 0;
127 return Operand(base, index, scale, disp);
128 }
129 case kMode_MR1I:
130 case kMode_MR2I:
131 case kMode_MR4I:
132 case kMode_MR8I: {
133 Register base = InputRegister(NextOffset(offset));
134 Register index = InputRegister(NextOffset(offset));
135 ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
136 int32_t disp = InputInt32(NextOffset(offset));
137 return Operand(base, index, scale, disp);
138 }
139 case kMode_M1:
140 case kMode_M2:
141 case kMode_M4:
142 case kMode_M8: {
143 Register index = InputRegister(NextOffset(offset));
144 ScaleFactor scale = ScaleFor(kMode_M1, mode);
145 int32_t disp = 0;
146 return Operand(index, scale, disp);
147 }
148 case kMode_M1I:
149 case kMode_M2I:
150 case kMode_M4I:
151 case kMode_M8I: {
152 Register index = InputRegister(NextOffset(offset));
153 ScaleFactor scale = ScaleFor(kMode_M1I, mode);
154 int32_t disp = InputInt32(NextOffset(offset));
155 return Operand(index, scale, disp);
156 }
157 case kMode_MI: {
158 int32_t disp = InputInt32(NextOffset(offset));
159 return Operand(Immediate(disp));
160 }
161 case kMode_None:
162 UNREACHABLE();
163 return Operand(no_reg, 0);
164 }
165 UNREACHABLE();
166 return Operand(no_reg, 0);
167 }
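// Editorial example (added): for an instruction encoded with kMode_MR4I this
// converter consumes three inputs in order, the base register, the index
// register and a 32-bit displacement, and produces
// Operand(base, index, times_4, disp); *offset is left pointing past the
// inputs that were consumed.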
168
169 Operand MemoryOperand(size_t first_input = 0) {
170 return MemoryOperand(&first_input);
171 }
172};
173
174
175namespace {
176
177bool HasImmediateInput(Instruction* instr, size_t index) {
178 return instr->InputAt(index)->IsImmediate();
179}
180
181
182class OutOfLineLoadInteger final : public OutOfLineCode {
183 public:
184 OutOfLineLoadInteger(CodeGenerator* gen, Register result)
185 : OutOfLineCode(gen), result_(result) {}
186
187 void Generate() final { __ xor_(result_, result_); }
188
189 private:
190 Register const result_;
191};
192
193
194class OutOfLineLoadFloat final : public OutOfLineCode {
195 public:
196 OutOfLineLoadFloat(CodeGenerator* gen, X87Register result)
197 : OutOfLineCode(gen), result_(result) {}
198
199 void Generate() final {
200 DCHECK(result_.code() == 0);
201 USE(result_);
202 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
203 __ VerifyX87StackDepth(1);
204 }
205 __ fstp(0);
206 __ push(Immediate(0xffffffff));
207 __ push(Immediate(0x7fffffff));
208 __ fld_d(MemOperand(esp, 0));
209 __ lea(esp, Operand(esp, kDoubleSize));
210 }
211
212 private:
213 X87Register const result_;
214};
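// Editorial note (added): the Generate() path above pops the x87 top-of-stack
// and reloads it from the two pushed words, whose combined bit pattern decodes
// as a quiet NaN, so an out-of-bounds checked float load yields NaN instead of
// touching memory.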
215
216
217class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
218 public:
219 OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
220 X87Register input)
221 : OutOfLineCode(gen), result_(result), input_(input) {}
222
223 void Generate() final {
224 UNIMPLEMENTED();
225 USE(result_);
226 USE(input_);
227 }
228
229 private:
230 Register const result_;
231 X87Register const input_;
232};
233
234
235class OutOfLineRecordWrite final : public OutOfLineCode {
236 public:
237 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
238 Register value, Register scratch0, Register scratch1,
239 RecordWriteMode mode)
240 : OutOfLineCode(gen),
241 object_(object),
242 operand_(operand),
243 value_(value),
244 scratch0_(scratch0),
245 scratch1_(scratch1),
246 mode_(mode) {}
247
248 void Generate() final {
249 if (mode_ > RecordWriteMode::kValueIsPointer) {
250 __ JumpIfSmi(value_, exit());
251 }
252 __ CheckPageFlag(value_, scratch0_,
253 MemoryChunk::kPointersToHereAreInterestingMask, zero,
254 exit());
255 RememberedSetAction const remembered_set_action =
256 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
257 : OMIT_REMEMBERED_SET;
258 SaveFPRegsMode const save_fp_mode =
259 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
260 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
261 remembered_set_action, save_fp_mode);
262 __ lea(scratch1_, operand_);
263 __ CallStub(&stub);
264 }
265
266 private:
267 Register const object_;
268 Operand const operand_;
269 Register const value_;
270 Register const scratch0_;
271 Register const scratch1_;
272 RecordWriteMode const mode_;
273};
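// Editorial sketch (added) of the write-barrier shape used by this stub: the
// inline code performs the store and a CheckPageFlag test on the object's
// page (kPointersFromHereAreInterestingMask); only if that test fires does it
// jump here, where the value's page is tested in turn
// (kPointersToHereAreInterestingMask), smis are skipped when the mode allows,
// and RecordWriteStub is finally called with the slot address in scratch1_.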
274
275} // namespace
276
277
278#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr) \
279 do { \
280 auto result = i.OutputDoubleRegister(); \
281 auto offset = i.InputRegister(0); \
282 DCHECK(result.code() == 0); \
283 if (instr->InputAt(1)->IsRegister()) { \
284 __ cmp(offset, i.InputRegister(1)); \
285 } else { \
286 __ cmp(offset, i.InputImmediate(1)); \
287 } \
288 OutOfLineCode* ool = new (zone()) OutOfLineLoadFloat(this, result); \
289 __ j(above_equal, ool->entry()); \
290 __ fstp(0); \
291 __ asm_instr(i.MemoryOperand(2)); \
292 __ bind(ool->exit()); \
293 } while (false)
294
295
296#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr) \
297 do { \
298 auto result = i.OutputRegister(); \
299 auto offset = i.InputRegister(0); \
300 if (instr->InputAt(1)->IsRegister()) { \
301 __ cmp(offset, i.InputRegister(1)); \
302 } else { \
303 __ cmp(offset, i.InputImmediate(1)); \
304 } \
305 OutOfLineCode* ool = new (zone()) OutOfLineLoadInteger(this, result); \
306 __ j(above_equal, ool->entry()); \
307 __ asm_instr(result, i.MemoryOperand(2)); \
308 __ bind(ool->exit()); \
309 } while (false)
310
311
312#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr) \
313 do { \
314 auto offset = i.InputRegister(0); \
315 if (instr->InputAt(1)->IsRegister()) { \
316 __ cmp(offset, i.InputRegister(1)); \
317 } else { \
318 __ cmp(offset, i.InputImmediate(1)); \
319 } \
320 Label done; \
321 DCHECK(i.InputDoubleRegister(2).code() == 0); \
322 __ j(above_equal, &done, Label::kNear); \
323 __ asm_instr(i.MemoryOperand(3)); \
324 __ bind(&done); \
325 } while (false)
326
327
328#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \
329 do { \
330 auto offset = i.InputRegister(0); \
331 if (instr->InputAt(1)->IsRegister()) { \
332 __ cmp(offset, i.InputRegister(1)); \
333 } else { \
334 __ cmp(offset, i.InputImmediate(1)); \
335 } \
336 Label done; \
337 __ j(above_equal, &done, Label::kNear); \
338 if (instr->InputAt(2)->IsRegister()) { \
339 __ asm_instr(i.MemoryOperand(3), i.InputRegister(2)); \
340 } else { \
341 __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \
342 } \
343 __ bind(&done); \
344 } while (false)
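// Editorial note (added): the four checked-access macros above share one
// pattern: compare the offset register (input 0) against the length operand
// (input 1), branch above_equal either to an OutOfLine* stub (loads) or past
// the access (stores), and otherwise perform the access through
// i.MemoryOperand(2) for loads or i.MemoryOperand(3) for stores.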
345
346#define ASSEMBLE_COMPARE(asm_instr) \
347 do { \
348 if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
349 size_t index = 0; \
350 Operand left = i.MemoryOperand(&index); \
351 if (HasImmediateInput(instr, index)) { \
352 __ asm_instr(left, i.InputImmediate(index)); \
353 } else { \
354 __ asm_instr(left, i.InputRegister(index)); \
355 } \
356 } else { \
357 if (HasImmediateInput(instr, 1)) { \
358 if (instr->InputAt(0)->IsRegister()) { \
359 __ asm_instr(i.InputRegister(0), i.InputImmediate(1)); \
360 } else { \
361 __ asm_instr(i.InputOperand(0), i.InputImmediate(1)); \
362 } \
363 } else { \
364 if (instr->InputAt(1)->IsRegister()) { \
365 __ asm_instr(i.InputRegister(0), i.InputRegister(1)); \
366 } else { \
367 __ asm_instr(i.InputRegister(0), i.InputOperand(1)); \
368 } \
369 } \
370 } \
371 } while (0)
372
373void CodeGenerator::AssembleDeconstructFrame() {
374 __ mov(esp, ebp);
375 __ pop(ebp);
376}
377
378void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
379 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
380 if (sp_slot_delta > 0) {
381 __ add(esp, Immediate(sp_slot_delta * kPointerSize));
382 }
383 frame_access_state()->SetFrameAccessToDefault();
384}
385
386
387void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
388 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
389 if (sp_slot_delta < 0) {
390 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize));
391 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
392 }
393 if (frame_access_state()->has_frame()) {
394 __ mov(ebp, MemOperand(ebp, 0));
395 }
396 frame_access_state()->SetFrameAccessToSP();
397}
398
399void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
400 Register, Register,
401 Register) {
402 // There are not enough temp registers left on ia32 for a call instruction
403 // so we pick some scratch registers and save/restore them manually here.
404 int scratch_count = 3;
405 Register scratch1 = ebx;
406 Register scratch2 = ecx;
407 Register scratch3 = edx;
408 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
409 Label done;
410
411 // Check if current frame is an arguments adaptor frame.
412 __ cmp(Operand(ebp, StandardFrameConstants::kContextOffset),
413 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
414 __ j(not_equal, &done, Label::kNear);
415
416 __ push(scratch1);
417 __ push(scratch2);
418 __ push(scratch3);
419
420 // Load the arguments count from the current arguments adaptor frame
421 // (note that it does not include the receiver).
422 Register caller_args_count_reg = scratch1;
423 __ mov(caller_args_count_reg,
424 Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
425 __ SmiUntag(caller_args_count_reg);
426
427 ParameterCount callee_args_count(args_reg);
428 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
429 scratch3, ReturnAddressState::kOnStack, scratch_count);
430 __ pop(scratch3);
431 __ pop(scratch2);
432 __ pop(scratch1);
433
434 __ bind(&done);
435}
436
437// Assembles an instruction after register allocation, producing machine code.
438CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
439 Instruction* instr) {
440 X87OperandConverter i(this, instr);
441 InstructionCode opcode = instr->opcode();
442 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000443
Ben Murdochda12d292016-06-02 14:46:10 +0100444 switch (arch_opcode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000445 case kArchCallCodeObject: {
446 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
447 __ VerifyX87StackDepth(1);
448 }
449 __ fstp(0);
450 EnsureSpaceForLazyDeopt();
451 if (HasImmediateInput(instr, 0)) {
452 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
453 __ call(code, RelocInfo::CODE_TARGET);
454 } else {
455 Register reg = i.InputRegister(0);
456 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
457 __ call(reg);
458 }
459 RecordCallPosition(instr);
460 bool double_result =
461 instr->HasOutput() && instr->Output()->IsFPRegister();
462 if (double_result) {
463 __ lea(esp, Operand(esp, -kDoubleSize));
464 __ fstp_d(Operand(esp, 0));
465 }
466 __ fninit();
467 if (double_result) {
468 __ fld_d(Operand(esp, 0));
469 __ lea(esp, Operand(esp, kDoubleSize));
470 } else {
471 __ fld1();
472 }
473 frame_access_state()->ClearSPDelta();
474 break;
475 }
476 case kArchTailCallCodeObjectFromJSFunction:
477 case kArchTailCallCodeObject: {
478 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
479 __ VerifyX87StackDepth(1);
480 }
481 __ fstp(0);
482 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
483 AssembleDeconstructActivationRecord(stack_param_delta);
484 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
485 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
486 no_reg, no_reg, no_reg);
487 }
488 if (HasImmediateInput(instr, 0)) {
489 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
490 __ jmp(code, RelocInfo::CODE_TARGET);
491 } else {
492 Register reg = i.InputRegister(0);
493 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
494 __ jmp(reg);
495 }
496 frame_access_state()->ClearSPDelta();
497 break;
498 }
499 case kArchTailCallAddress: {
500 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
501 AssembleDeconstructActivationRecord(stack_param_delta);
502 CHECK(!HasImmediateInput(instr, 0));
503 Register reg = i.InputRegister(0);
504 __ jmp(reg);
505 frame_access_state()->ClearSPDelta();
506 break;
507 }
508 case kArchCallJSFunction: {
509 EnsureSpaceForLazyDeopt();
510 Register func = i.InputRegister(0);
511 if (FLAG_debug_code) {
512 // Check the function's context matches the context argument.
513 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
514 __ Assert(equal, kWrongFunctionContext);
515 }
516 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
517 __ VerifyX87StackDepth(1);
518 }
519 __ fstp(0);
520 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset));
521 RecordCallPosition(instr);
522 bool double_result =
523 instr->HasOutput() && instr->Output()->IsFPRegister();
524 if (double_result) {
525 __ lea(esp, Operand(esp, -kDoubleSize));
526 __ fstp_d(Operand(esp, 0));
527 }
528 __ fninit();
529 if (double_result) {
530 __ fld_d(Operand(esp, 0));
531 __ lea(esp, Operand(esp, kDoubleSize));
532 } else {
533 __ fld1();
534 }
535 frame_access_state()->ClearSPDelta();
536 break;
537 }
538 case kArchTailCallJSFunctionFromJSFunction:
539 case kArchTailCallJSFunction: {
540 Register func = i.InputRegister(0);
541 if (FLAG_debug_code) {
542 // Check the function's context matches the context argument.
543 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
544 __ Assert(equal, kWrongFunctionContext);
545 }
546 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
547 __ VerifyX87StackDepth(1);
548 }
549 __ fstp(0);
550 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
551 AssembleDeconstructActivationRecord(stack_param_delta);
552 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
553 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
554 no_reg, no_reg, no_reg);
555 }
556 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
557 frame_access_state()->ClearSPDelta();
558 break;
559 }
560 case kArchPrepareCallCFunction: {
561 // Frame alignment requires using FP-relative frame addressing.
562 frame_access_state()->SetFrameAccessToFP();
563 int const num_parameters = MiscField::decode(instr->opcode());
564 __ PrepareCallCFunction(num_parameters, i.TempRegister(0));
565 break;
566 }
567 case kArchPrepareTailCall:
568 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
569 break;
570 case kArchCallCFunction: {
571 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
572 __ VerifyX87StackDepth(1);
573 }
574 __ fstp(0);
575 int const num_parameters = MiscField::decode(instr->opcode());
576 if (HasImmediateInput(instr, 0)) {
577 ExternalReference ref = i.InputExternalReference(0);
578 __ CallCFunction(ref, num_parameters);
579 } else {
580 Register func = i.InputRegister(0);
581 __ CallCFunction(func, num_parameters);
582 }
583 bool double_result =
584 instr->HasOutput() && instr->Output()->IsFPRegister();
585 if (double_result) {
586 __ lea(esp, Operand(esp, -kDoubleSize));
587 __ fstp_d(Operand(esp, 0));
588 }
589 __ fninit();
590 if (double_result) {
591 __ fld_d(Operand(esp, 0));
592 __ lea(esp, Operand(esp, kDoubleSize));
593 } else {
594 __ fld1();
595 }
596 frame_access_state()->SetFrameAccessToDefault();
597 frame_access_state()->ClearSPDelta();
598 break;
599 }
600 case kArchJmp:
601 AssembleArchJump(i.InputRpo(0));
602 break;
603 case kArchLookupSwitch:
604 AssembleArchLookupSwitch(instr);
605 break;
606 case kArchTableSwitch:
607 AssembleArchTableSwitch(instr);
608 break;
609 case kArchNop:
610 case kArchThrowTerminator:
611 // don't emit code for nops.
612 break;
613 case kArchDeoptimize: {
614 int deopt_state_id =
615 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
616 int double_register_param_count = 0;
617 int x87_layout = 0;
618 for (size_t i = 0; i < instr->InputCount(); i++) {
619 if (instr->InputAt(i)->IsFPRegister()) {
620 double_register_param_count++;
621 }
622 }
623 // Currently we use only one X87 register. If double_register_param_count
624 // is bigger than 1, it means the same double register was added more than
625 // once to the inputs of this instruction.
626 if (double_register_param_count > 0) {
627 x87_layout = (0 << 3) | 1;
628 }
629 // The layout of x87 register stack is loaded on the top of FPU register
630 // stack for deoptimization.
631 __ push(Immediate(x87_layout));
632 __ fild_s(MemOperand(esp, 0));
633 __ lea(esp, Operand(esp, kPointerSize));
634
635 Deoptimizer::BailoutType bailout_type =
636 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
637 CodeGenResult result =
638 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
639 if (result != kSuccess) return result;
640 break;
641 }
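// Editorial note (added): the x87_layout word pushed above appears to encode
// the number of live x87 stack slots in its low three bits (here at most 1)
// and the register assignment in the upper bits (0), matching the comment
// that only one X87 register is used; the deoptimizer picks this value up
// from the top of the FPU stack.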
642 case kArchRet:
643 AssembleReturn();
644 break;
645 case kArchFramePointer:
646 __ mov(i.OutputRegister(), ebp);
647 break;
648 case kArchStackPointer:
649 __ mov(i.OutputRegister(), esp);
650 break;
651 case kArchParentFramePointer:
652 if (frame_access_state()->has_frame()) {
653 __ mov(i.OutputRegister(), Operand(ebp, 0));
654 } else {
655 __ mov(i.OutputRegister(), ebp);
656 }
657 break;
658 case kArchTruncateDoubleToI: {
659 if (!instr->InputAt(0)->IsFPRegister()) {
660 __ fld_d(i.InputOperand(0));
661 }
662 __ TruncateX87TOSToI(i.OutputRegister());
663 if (!instr->InputAt(0)->IsFPRegister()) {
664 __ fstp(0);
665 }
666 break;
667 }
668 case kArchStoreWithWriteBarrier: {
669 RecordWriteMode mode =
670 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
671 Register object = i.InputRegister(0);
672 size_t index = 0;
673 Operand operand = i.MemoryOperand(&index);
674 Register value = i.InputRegister(index);
675 Register scratch0 = i.TempRegister(0);
676 Register scratch1 = i.TempRegister(1);
677 auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
678 scratch0, scratch1, mode);
679 __ mov(operand, value);
680 __ CheckPageFlag(object, scratch0,
681 MemoryChunk::kPointersFromHereAreInterestingMask,
682 not_zero, ool->entry());
683 __ bind(ool->exit());
684 break;
685 }
686 case kArchStackSlot: {
687 FrameOffset offset =
688 frame_access_state()->GetFrameOffset(i.InputInt32(0));
689 Register base;
690 if (offset.from_stack_pointer()) {
691 base = esp;
692 } else {
693 base = ebp;
694 }
695 __ lea(i.OutputRegister(), Operand(base, offset.offset()));
696 break;
697 }
698 case kX87Add:
699 if (HasImmediateInput(instr, 1)) {
700 __ add(i.InputOperand(0), i.InputImmediate(1));
701 } else {
702 __ add(i.InputRegister(0), i.InputOperand(1));
703 }
704 break;
705 case kX87And:
706 if (HasImmediateInput(instr, 1)) {
707 __ and_(i.InputOperand(0), i.InputImmediate(1));
708 } else {
709 __ and_(i.InputRegister(0), i.InputOperand(1));
710 }
711 break;
712 case kX87Cmp:
713 ASSEMBLE_COMPARE(cmp);
714 break;
715 case kX87Cmp16:
716 ASSEMBLE_COMPARE(cmpw);
717 break;
718 case kX87Cmp8:
719 ASSEMBLE_COMPARE(cmpb);
720 break;
721 case kX87Test:
722 ASSEMBLE_COMPARE(test);
723 break;
724 case kX87Test16:
725 ASSEMBLE_COMPARE(test_w);
726 break;
727 case kX87Test8:
728 ASSEMBLE_COMPARE(test_b);
729 break;
730 case kX87Imul:
731 if (HasImmediateInput(instr, 1)) {
732 __ imul(i.OutputRegister(), i.InputOperand(0), i.InputInt32(1));
733 } else {
734 __ imul(i.OutputRegister(), i.InputOperand(1));
735 }
736 break;
737 case kX87ImulHigh:
738 __ imul(i.InputRegister(1));
739 break;
740 case kX87UmulHigh:
741 __ mul(i.InputRegister(1));
742 break;
743 case kX87Idiv:
744 __ cdq();
745 __ idiv(i.InputOperand(1));
746 break;
747 case kX87Udiv:
748 __ Move(edx, Immediate(0));
749 __ div(i.InputOperand(1));
750 break;
751 case kX87Not:
752 __ not_(i.OutputOperand());
753 break;
754 case kX87Neg:
755 __ neg(i.OutputOperand());
756 break;
757 case kX87Or:
758 if (HasImmediateInput(instr, 1)) {
759 __ or_(i.InputOperand(0), i.InputImmediate(1));
760 } else {
761 __ or_(i.InputRegister(0), i.InputOperand(1));
762 }
763 break;
764 case kX87Xor:
765 if (HasImmediateInput(instr, 1)) {
766 __ xor_(i.InputOperand(0), i.InputImmediate(1));
767 } else {
768 __ xor_(i.InputRegister(0), i.InputOperand(1));
769 }
770 break;
771 case kX87Sub:
772 if (HasImmediateInput(instr, 1)) {
773 __ sub(i.InputOperand(0), i.InputImmediate(1));
774 } else {
775 __ sub(i.InputRegister(0), i.InputOperand(1));
776 }
777 break;
778 case kX87Shl:
779 if (HasImmediateInput(instr, 1)) {
780 __ shl(i.OutputOperand(), i.InputInt5(1));
781 } else {
782 __ shl_cl(i.OutputOperand());
783 }
784 break;
785 case kX87Shr:
786 if (HasImmediateInput(instr, 1)) {
787 __ shr(i.OutputOperand(), i.InputInt5(1));
788 } else {
789 __ shr_cl(i.OutputOperand());
790 }
791 break;
792 case kX87Sar:
793 if (HasImmediateInput(instr, 1)) {
794 __ sar(i.OutputOperand(), i.InputInt5(1));
795 } else {
796 __ sar_cl(i.OutputOperand());
797 }
798 break;
799 case kX87AddPair: {
800 // i.OutputRegister(0) == i.InputRegister(0) ... left low word.
801 // i.InputRegister(1) ... left high word.
802 // i.InputRegister(2) ... right low word.
803 // i.InputRegister(3) ... right high word.
804 bool use_temp = false;
805 if (i.OutputRegister(0).code() == i.InputRegister(1).code() ||
806 i.OutputRegister(0).code() == i.InputRegister(3).code()) {
807 // We cannot write to the output register directly, because it would
808 // overwrite an input for adc. We have to use the temp register.
809 use_temp = true;
810 __ Move(i.TempRegister(0), i.InputRegister(0));
811 __ add(i.TempRegister(0), i.InputRegister(2));
812 } else {
813 __ add(i.OutputRegister(0), i.InputRegister(2));
814 }
815 __ adc(i.InputRegister(1), Operand(i.InputRegister(3)));
816 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) {
817 __ Move(i.OutputRegister(1), i.InputRegister(1));
818 }
819 if (use_temp) {
820 __ Move(i.OutputRegister(0), i.TempRegister(0));
821 }
822 break;
823 }
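// Editorial sketch (added): when the output does not alias the right-hand
// inputs, the pair addition above reduces to the classic ia32 64-bit sequence
//   add out_lo, right_lo
//   adc left_hi, right_hi
// and the temp-register path exists only to avoid clobbering an adc input.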
824 case kX87SubPair: {
825 // i.OutputRegister(0) == i.InputRegister(0) ... left low word.
826 // i.InputRegister(1) ... left high word.
827 // i.InputRegister(2) ... right low word.
828 // i.InputRegister(3) ... right high word.
829 bool use_temp = false;
830 if (i.OutputRegister(0).code() == i.InputRegister(1).code() ||
831 i.OutputRegister(0).code() == i.InputRegister(3).code()) {
832 // We cannot write to the output register directly, because it would
833 // overwrite an input for sbb. We have to use the temp register.
834 use_temp = true;
835 __ Move(i.TempRegister(0), i.InputRegister(0));
836 __ sub(i.TempRegister(0), i.InputRegister(2));
837 } else {
838 __ sub(i.OutputRegister(0), i.InputRegister(2));
839 }
840 __ sbb(i.InputRegister(1), Operand(i.InputRegister(3)));
841 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) {
842 __ Move(i.OutputRegister(1), i.InputRegister(1));
843 }
844 if (use_temp) {
845 __ Move(i.OutputRegister(0), i.TempRegister(0));
846 }
847 break;
848 }
849 case kX87MulPair: {
850 __ imul(i.OutputRegister(1), i.InputOperand(0));
851 __ mov(i.TempRegister(0), i.InputOperand(1));
852 __ imul(i.TempRegister(0), i.InputOperand(2));
853 __ add(i.OutputRegister(1), i.TempRegister(0));
854 __ mov(i.OutputRegister(0), i.InputOperand(0));
855 // Multiply the low words; mul places the 64-bit product in edx:eax.
856 __ mul(i.InputRegister(2));
857 __ add(i.OutputRegister(1), i.TempRegister(0));
858
859 break;
860 }
861 case kX87ShlPair:
862 if (HasImmediateInput(instr, 2)) {
863 __ ShlPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
864 } else {
865 // Shift has been loaded into CL by the register allocator.
866 __ ShlPair_cl(i.InputRegister(1), i.InputRegister(0));
867 }
868 break;
869 case kX87ShrPair:
870 if (HasImmediateInput(instr, 2)) {
871 __ ShrPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
872 } else {
873 // Shift has been loaded into CL by the register allocator.
874 __ ShrPair_cl(i.InputRegister(1), i.InputRegister(0));
875 }
876 break;
877 case kX87SarPair:
878 if (HasImmediateInput(instr, 2)) {
879 __ SarPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
880 } else {
881 // Shift has been loaded into CL by the register allocator.
882 __ SarPair_cl(i.InputRegister(1), i.InputRegister(0));
883 }
884 break;
885 case kX87Ror:
886 if (HasImmediateInput(instr, 1)) {
887 __ ror(i.OutputOperand(), i.InputInt5(1));
888 } else {
889 __ ror_cl(i.OutputOperand());
890 }
891 break;
892 case kX87Lzcnt:
893 __ Lzcnt(i.OutputRegister(), i.InputOperand(0));
894 break;
895 case kX87Popcnt:
896 __ Popcnt(i.OutputRegister(), i.InputOperand(0));
897 break;
898 case kX87LoadFloat64Constant: {
899 InstructionOperand* source = instr->InputAt(0);
900 InstructionOperand* destination = instr->Output();
901 DCHECK(source->IsConstant());
902 X87OperandConverter g(this, nullptr);
903 Constant src_constant = g.ToConstant(source);
904
905 DCHECK_EQ(Constant::kFloat64, src_constant.type());
906 uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
907 uint32_t lower = static_cast<uint32_t>(src);
908 uint32_t upper = static_cast<uint32_t>(src >> 32);
909 if (destination->IsFPRegister()) {
910 __ sub(esp, Immediate(kDoubleSize));
911 __ mov(MemOperand(esp, 0), Immediate(lower));
912 __ mov(MemOperand(esp, kInt32Size), Immediate(upper));
913 __ fstp(0);
914 __ fld_d(MemOperand(esp, 0));
915 __ add(esp, Immediate(kDoubleSize));
916 } else {
917 UNREACHABLE();
918 }
919 break;
920 }
921 case kX87Float32Cmp: {
922 __ fld_s(MemOperand(esp, kFloatSize));
923 __ fld_s(MemOperand(esp, 0));
924 __ FCmp();
925 __ lea(esp, Operand(esp, 2 * kFloatSize));
926 break;
927 }
928 case kX87Float32Add: {
929 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
930 __ VerifyX87StackDepth(1);
931 }
932 __ X87SetFPUCW(0x027F);
933 __ fstp(0);
934 __ fld_s(MemOperand(esp, 0));
935 __ fld_s(MemOperand(esp, kFloatSize));
936 __ faddp();
937 // Clear stack.
938 __ lea(esp, Operand(esp, 2 * kFloatSize));
939 // Restore the default value of control word.
940 __ X87SetFPUCW(0x037F);
941 break;
942 }
943 case kX87Float32Sub: {
944 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
945 __ VerifyX87StackDepth(1);
946 }
947 __ X87SetFPUCW(0x027F);
948 __ fstp(0);
949 __ fld_s(MemOperand(esp, kFloatSize));
950 __ fld_s(MemOperand(esp, 0));
951 __ fsubp();
952 // Clear stack.
953 __ lea(esp, Operand(esp, 2 * kFloatSize));
954 // Restore the default value of control word.
955 __ X87SetFPUCW(0x037F);
956 break;
957 }
958 case kX87Float32Mul: {
959 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
960 __ VerifyX87StackDepth(1);
961 }
962 __ X87SetFPUCW(0x027F);
963 __ fstp(0);
964 __ fld_s(MemOperand(esp, kFloatSize));
965 __ fld_s(MemOperand(esp, 0));
966 __ fmulp();
967 // Clear stack.
968 __ lea(esp, Operand(esp, 2 * kFloatSize));
969 // Restore the default value of control word.
970 __ X87SetFPUCW(0x037F);
971 break;
972 }
973 case kX87Float32Div: {
974 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
975 __ VerifyX87StackDepth(1);
976 }
977 __ X87SetFPUCW(0x027F);
978 __ fstp(0);
979 __ fld_s(MemOperand(esp, kFloatSize));
980 __ fld_s(MemOperand(esp, 0));
981 __ fdivp();
982 // Clear stack.
983 __ lea(esp, Operand(esp, 2 * kFloatSize));
984 // Restore the default value of control word.
985 __ X87SetFPUCW(0x037F);
986 break;
987 }
988 case kX87Float32Max: {
989 Label check_nan_left, check_zero, return_left, return_right;
990 Condition condition = below;
991 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
992 __ VerifyX87StackDepth(1);
993 }
994 __ fstp(0);
995 __ fld_s(MemOperand(esp, kFloatSize));
996 __ fld_s(MemOperand(esp, 0));
997 __ fld(1);
998 __ fld(1);
999 __ FCmp();
1000
1001 // At least one NaN.
1002 // Return the second operand if either of the two operands is NaN.
1003 __ j(parity_even, &return_right, Label::kNear);
1004 __ j(equal, &check_zero, Label::kNear); // left == right.
1005 __ j(condition, &return_left, Label::kNear);
1006 __ jmp(&return_right, Label::kNear);
1007
1008 __ bind(&check_zero);
1009 __ fld(0);
1010 __ fldz();
1011 __ FCmp();
1012 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1013
1014 __ fadd(1);
1015 __ jmp(&return_left, Label::kNear);
1016
1017 __ bind(&return_right);
1018 __ fxch();
1019
1020 __ bind(&return_left);
1021 __ fstp(0);
1022 __ lea(esp, Operand(esp, 2 * kFloatSize));
1023 break;
1024 }
1025 case kX87Float32Min: {
1026 Label check_nan_left, check_zero, return_left, return_right;
1027 Condition condition = above;
1028 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1029 __ VerifyX87StackDepth(1);
1030 }
1031 __ fstp(0);
1032 __ fld_s(MemOperand(esp, kFloatSize));
1033 __ fld_s(MemOperand(esp, 0));
1034 __ fld(1);
1035 __ fld(1);
1036 __ FCmp();
1037 // At least one NaN.
1038 // Return the second operand if either of the two operands is NaN.
1039 __ j(parity_even, &return_right, Label::kNear);
1040 __ j(equal, &check_zero, Label::kNear); // left == right.
1041 __ j(condition, &return_left, Label::kNear);
1042 __ jmp(&return_right, Label::kNear);
1043
1044 __ bind(&check_zero);
1045 __ fld(0);
1046 __ fldz();
1047 __ FCmp();
1048 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1049 // At this point, both left and right are either 0 or -0.
1050 // Push st0 and st1 to the stack, then pop them to temp registers, OR
1051 // them, and load the result back as the left operand.
1052 __ push(eax);
1053 __ fld(1);
1054 __ fld(1);
1055 __ sub(esp, Immediate(2 * kPointerSize));
1056 __ fstp_s(MemOperand(esp, 0));
1057 __ fstp_s(MemOperand(esp, kPointerSize));
1058 __ pop(eax);
1059 __ xor_(MemOperand(esp, 0), eax);
1060 __ fstp(0);
1061 __ fld_s(MemOperand(esp, 0));
1062 __ pop(eax); // restore esp
1063 __ pop(eax); // restore esp
1064 __ jmp(&return_left, Label::kNear);
1065
1066
1067 __ bind(&return_right);
1068 __ fxch();
1069
1070 __ bind(&return_left);
1071 __ fstp(0);
1072 __ lea(esp, Operand(esp, 2 * kFloatSize));
1073 break;
1074 }
1075 case kX87Float32Sqrt: {
1076 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1077 __ VerifyX87StackDepth(1);
1078 }
1079 __ fstp(0);
1080 __ fld_s(MemOperand(esp, 0));
1081 __ fsqrt();
1082 __ lea(esp, Operand(esp, kFloatSize));
1083 break;
1084 }
1085 case kX87Float32Abs: {
1086 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1087 __ VerifyX87StackDepth(1);
1088 }
1089 __ fstp(0);
1090 __ fld_s(MemOperand(esp, 0));
1091 __ fabs();
1092 __ lea(esp, Operand(esp, kFloatSize));
1093 break;
1094 }
1095 case kX87Float32Round: {
1096 RoundingMode mode =
1097 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1098 // Set the correct round mode in x87 control register
1099 __ X87SetRC((mode << 10));
1100
1101 if (!instr->InputAt(0)->IsFPRegister()) {
1102 InstructionOperand* input = instr->InputAt(0);
1103 USE(input);
1104 DCHECK(input->IsFPStackSlot());
1105 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1106 __ VerifyX87StackDepth(1);
1107 }
1108 __ fstp(0);
1109 __ fld_s(i.InputOperand(0));
1110 }
1111 __ frndint();
1112 __ X87SetRC(0x0000);
1113 break;
1114 }
1115 case kX87Float64Add: {
1116 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1117 __ VerifyX87StackDepth(1);
1118 }
1119 __ X87SetFPUCW(0x027F);
1120 __ fstp(0);
1121 __ fld_d(MemOperand(esp, 0));
1122 __ fld_d(MemOperand(esp, kDoubleSize));
1123 __ faddp();
1124 // Clear stack.
1125 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1126 // Restore the default value of control word.
1127 __ X87SetFPUCW(0x037F);
1128 break;
1129 }
1130 case kX87Float64Sub: {
1131 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1132 __ VerifyX87StackDepth(1);
1133 }
1134 __ X87SetFPUCW(0x027F);
1135 __ fstp(0);
1136 __ fld_d(MemOperand(esp, kDoubleSize));
1137 __ fsub_d(MemOperand(esp, 0));
1138 // Clear stack.
1139 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1140 // Restore the default value of control word.
1141 __ X87SetFPUCW(0x037F);
1142 break;
1143 }
1144 case kX87Float64Mul: {
1145 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1146 __ VerifyX87StackDepth(1);
1147 }
1148 __ X87SetFPUCW(0x027F);
1149 __ fstp(0);
1150 __ fld_d(MemOperand(esp, kDoubleSize));
1151 __ fmul_d(MemOperand(esp, 0));
1152 // Clear stack.
1153 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1154 // Restore the default value of control word.
1155 __ X87SetFPUCW(0x037F);
1156 break;
1157 }
1158 case kX87Float64Div: {
1159 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1160 __ VerifyX87StackDepth(1);
1161 }
1162 __ X87SetFPUCW(0x027F);
1163 __ fstp(0);
1164 __ fld_d(MemOperand(esp, kDoubleSize));
1165 __ fdiv_d(MemOperand(esp, 0));
1166 // Clear stack.
1167 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1168 // Restore the default value of control word.
1169 __ X87SetFPUCW(0x037F);
1170 break;
1171 }
1172 case kX87Float64Mod: {
1173 FrameScope frame_scope(&masm_, StackFrame::MANUAL);
1174 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1175 __ VerifyX87StackDepth(1);
1176 }
1177 __ mov(eax, esp);
1178 __ PrepareCallCFunction(4, eax);
1179 __ fstp(0);
1180 __ fld_d(MemOperand(eax, 0));
1181 __ fstp_d(Operand(esp, 1 * kDoubleSize));
1182 __ fld_d(MemOperand(eax, kDoubleSize));
1183 __ fstp_d(Operand(esp, 0));
1184 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1185 4);
1186 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1187 break;
1188 }
1189 case kX87Float64Max: {
1190 Label check_zero, return_left, return_right;
1191 Condition condition = below;
1192 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1193 __ VerifyX87StackDepth(1);
1194 }
1195 __ fstp(0);
1196 __ fld_d(MemOperand(esp, kDoubleSize));
1197 __ fld_d(MemOperand(esp, 0));
1198 __ fld(1);
1199 __ fld(1);
1200 __ FCmp();
1201 __ j(parity_even, &return_right,
1202 Label::kNear); // At least one NaN, Return right.
1203 __ j(equal, &check_zero, Label::kNear); // left == right.
1204 __ j(condition, &return_left, Label::kNear);
1205 __ jmp(&return_right, Label::kNear);
1206
1207 __ bind(&check_zero);
1208 __ fld(0);
1209 __ fldz();
1210 __ FCmp();
1211 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1212
1213 __ bind(&return_right);
1214 __ fxch();
1215
1216 __ bind(&return_left);
1217 __ fstp(0);
1218 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1219 break;
1220 }
1221 case kX87Float64Min: {
1222 Label check_zero, return_left, return_right;
1223 Condition condition = above;
1224 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1225 __ VerifyX87StackDepth(1);
1226 }
1227 __ fstp(0);
1228 __ fld_d(MemOperand(esp, kDoubleSize));
1229 __ fld_d(MemOperand(esp, 0));
1230 __ fld(1);
1231 __ fld(1);
1232 __ FCmp();
1233 __ j(parity_even, &return_right,
1234 Label::kNear); // At least one NaN, return right value.
1235 __ j(equal, &check_zero, Label::kNear); // left == right.
1236 __ j(condition, &return_left, Label::kNear);
1237 __ jmp(&return_right, Label::kNear);
1238
1239 __ bind(&check_zero);
1240 __ fld(0);
1241 __ fldz();
1242 __ FCmp();
1243 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1244
1245 __ bind(&return_right);
1246 __ fxch();
1247
1248 __ bind(&return_left);
1249 __ fstp(0);
1250 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1251 break;
1252 }
1253 case kX87Float64Abs: {
1254 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1255 __ VerifyX87StackDepth(1);
1256 }
1257 __ fstp(0);
1258 __ fld_d(MemOperand(esp, 0));
1259 __ fabs();
1260 __ lea(esp, Operand(esp, kDoubleSize));
1261 break;
1262 }
1263 case kX87Int32ToFloat32: {
1264 InstructionOperand* input = instr->InputAt(0);
1265 DCHECK(input->IsRegister() || input->IsStackSlot());
1266 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1267 __ VerifyX87StackDepth(1);
1268 }
1269 __ fstp(0);
1270 if (input->IsRegister()) {
1271 Register input_reg = i.InputRegister(0);
1272 __ push(input_reg);
1273 __ fild_s(Operand(esp, 0));
1274 __ pop(input_reg);
1275 } else {
1276 __ fild_s(i.InputOperand(0));
1277 }
1278 break;
1279 }
1280 case kX87Uint32ToFloat32: {
1281 InstructionOperand* input = instr->InputAt(0);
1282 DCHECK(input->IsRegister() || input->IsStackSlot());
1283 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1284 __ VerifyX87StackDepth(1);
1285 }
1286 __ fstp(0);
1287 Label msb_set_src;
1288 Label jmp_return;
1289 // Put the input integer into eax (temporarily).
1290 __ push(eax);
1291 if (input->IsRegister())
1292 __ mov(eax, i.InputRegister(0));
1293 else
1294 __ mov(eax, i.InputOperand(0));
1295
1296 __ test(eax, eax);
1297 __ j(sign, &msb_set_src, Label::kNear);
1298 __ push(eax);
1299 __ fild_s(Operand(esp, 0));
1300 __ pop(eax);
1301
1302 __ jmp(&jmp_return, Label::kNear);
1303 __ bind(&msb_set_src);
1304 // Need another temp reg
1305 __ push(ebx);
1306 __ mov(ebx, eax);
1307 __ shr(eax, 1);
1308 // Recover the least significant bit to avoid rounding errors.
1309 __ and_(ebx, Immediate(1));
1310 __ or_(eax, ebx);
1311 __ push(eax);
1312 __ fild_s(Operand(esp, 0));
1313 __ pop(eax);
1314 __ fld(0);
1315 __ faddp();
1316 // Restore ebx.
1317 __ pop(ebx);
1318 __ bind(&jmp_return);
1319 // Restore eax.
1320 __ pop(eax);
1321 break;
1322 }
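// Editorial note (added): fild_s interprets its operand as a signed 32-bit
// integer, so the case above handles inputs with the sign bit set by halving
// the value (OR-ing the dropped low bit back in to preserve rounding),
// converting the halved value, and doubling the result on the FPU stack with
// fld(0)/faddp.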
1323 case kX87Int32ToFloat64: {
1324 InstructionOperand* input = instr->InputAt(0);
1325 DCHECK(input->IsRegister() || input->IsStackSlot());
1326 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1327 __ VerifyX87StackDepth(1);
1328 }
1329 __ fstp(0);
1330 if (input->IsRegister()) {
1331 Register input_reg = i.InputRegister(0);
1332 __ push(input_reg);
1333 __ fild_s(Operand(esp, 0));
1334 __ pop(input_reg);
1335 } else {
1336 __ fild_s(i.InputOperand(0));
1337 }
1338 break;
1339 }
1340 case kX87Float32ToFloat64: {
1341 InstructionOperand* input = instr->InputAt(0);
1342 if (input->IsFPRegister()) {
1343 __ sub(esp, Immediate(kDoubleSize));
1344 __ fstp_s(MemOperand(esp, 0));
1345 __ fld_s(MemOperand(esp, 0));
1346 __ add(esp, Immediate(kDoubleSize));
1347 } else {
1348 DCHECK(input->IsFPStackSlot());
1349 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1350 __ VerifyX87StackDepth(1);
1351 }
1352 __ fstp(0);
1353 __ fld_s(i.InputOperand(0));
1354 }
1355 break;
1356 }
1357 case kX87Uint32ToFloat64: {
1358 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1359 __ VerifyX87StackDepth(1);
1360 }
1361 __ fstp(0);
1362 __ LoadUint32NoSSE2(i.InputRegister(0));
1363 break;
1364 }
1365 case kX87Float32ToInt32: {
1366 if (!instr->InputAt(0)->IsFPRegister()) {
1367 __ fld_s(i.InputOperand(0));
1368 }
1369 __ TruncateX87TOSToI(i.OutputRegister(0));
1370 if (!instr->InputAt(0)->IsFPRegister()) {
1371 __ fstp(0);
1372 }
1373 break;
1374 }
1375 case kX87Float32ToUint32: {
1376 if (!instr->InputAt(0)->IsFPRegister()) {
1377 __ fld_s(i.InputOperand(0));
1378 }
1379 Label success;
1380 __ TruncateX87TOSToI(i.OutputRegister(0));
1381 __ test(i.OutputRegister(0), i.OutputRegister(0));
1382 __ j(positive, &success);
1383 __ push(Immediate(INT32_MIN));
1384 __ fild_s(Operand(esp, 0));
1385 __ lea(esp, Operand(esp, kPointerSize));
1386 __ faddp();
1387 __ TruncateX87TOSToI(i.OutputRegister(0));
1388 __ or_(i.OutputRegister(0), Immediate(0x80000000));
1389 __ bind(&success);
1390 if (!instr->InputAt(0)->IsFPRegister()) {
1391 __ fstp(0);
1392 }
1393 break;
1394 }
1395 case kX87Float64ToInt32: {
1396 if (!instr->InputAt(0)->IsFPRegister()) {
1397 __ fld_d(i.InputOperand(0));
1398 }
1399 __ TruncateX87TOSToI(i.OutputRegister(0));
1400 if (!instr->InputAt(0)->IsFPRegister()) {
1401 __ fstp(0);
1402 }
1403 break;
1404 }
1405 case kX87Float64ToFloat32: {
1406 InstructionOperand* input = instr->InputAt(0);
1407 if (input->IsFPRegister()) {
1408 __ sub(esp, Immediate(kDoubleSize));
1409 __ fstp_s(MemOperand(esp, 0));
1410 __ fld_s(MemOperand(esp, 0));
1411 __ add(esp, Immediate(kDoubleSize));
1412 } else {
1413 DCHECK(input->IsFPStackSlot());
1414 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1415 __ VerifyX87StackDepth(1);
1416 }
1417 __ fstp(0);
1418 __ fld_d(i.InputOperand(0));
1419 __ sub(esp, Immediate(kDoubleSize));
1420 __ fstp_s(MemOperand(esp, 0));
1421 __ fld_s(MemOperand(esp, 0));
1422 __ add(esp, Immediate(kDoubleSize));
1423 }
1424 break;
1425 }
1426 case kX87Float64ToUint32: {
1427 __ push_imm32(-2147483648);
1428 if (!instr->InputAt(0)->IsFPRegister()) {
1429 __ fld_d(i.InputOperand(0));
1430 }
1431 __ fild_s(Operand(esp, 0));
1432 __ fld(1);
1433 __ faddp();
1434 __ TruncateX87TOSToI(i.OutputRegister(0));
1435 __ add(esp, Immediate(kInt32Size));
1436 __ add(i.OutputRegister(), Immediate(0x80000000));
1437 __ fstp(0);
1438 if (!instr->InputAt(0)->IsFPRegister()) {
1439 __ fstp(0);
1440 }
1441 break;
1442 }
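// Editorial sketch (added) of the bias trick above: INT32_MIN is pushed and
// added to the input as a double, which shifts the value into signed int32
// range; after TruncateX87TOSToI the bias is undone with an integer add of
// 0x80000000, leaving the original value reinterpreted as an unsigned 32-bit
// integer in the output register.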
1443 case kX87Float64ExtractHighWord32: {
1444 if (instr->InputAt(0)->IsFPRegister()) {
1445 __ sub(esp, Immediate(kDoubleSize));
1446 __ fst_d(MemOperand(esp, 0));
1447 __ mov(i.OutputRegister(), MemOperand(esp, kDoubleSize / 2));
1448 __ add(esp, Immediate(kDoubleSize));
1449 } else {
1450 InstructionOperand* input = instr->InputAt(0);
1451 USE(input);
1452 DCHECK(input->IsFPStackSlot());
1453 __ mov(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
1454 }
1455 break;
1456 }
1457 case kX87Float64ExtractLowWord32: {
1458 if (instr->InputAt(0)->IsFPRegister()) {
1459 __ sub(esp, Immediate(kDoubleSize));
1460 __ fst_d(MemOperand(esp, 0));
1461 __ mov(i.OutputRegister(), MemOperand(esp, 0));
1462 __ add(esp, Immediate(kDoubleSize));
1463 } else {
1464 InstructionOperand* input = instr->InputAt(0);
1465 USE(input);
1466 DCHECK(input->IsFPStackSlot());
1467 __ mov(i.OutputRegister(), i.InputOperand(0));
1468 }
1469 break;
1470 }
1471 case kX87Float64InsertHighWord32: {
1472 __ sub(esp, Immediate(kDoubleSize));
1473 __ fstp_d(MemOperand(esp, 0));
1474 __ mov(MemOperand(esp, kDoubleSize / 2), i.InputRegister(1));
1475 __ fld_d(MemOperand(esp, 0));
1476 __ add(esp, Immediate(kDoubleSize));
1477 break;
1478 }
1479 case kX87Float64InsertLowWord32: {
1480 __ sub(esp, Immediate(kDoubleSize));
1481 __ fstp_d(MemOperand(esp, 0));
1482 __ mov(MemOperand(esp, 0), i.InputRegister(1));
1483 __ fld_d(MemOperand(esp, 0));
1484 __ add(esp, Immediate(kDoubleSize));
1485 break;
1486 }
1487 case kX87Float64Sqrt: {
1488 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1489 __ VerifyX87StackDepth(1);
1490 }
1491 __ X87SetFPUCW(0x027F);
1492 __ fstp(0);
1493 __ fld_d(MemOperand(esp, 0));
1494 __ fsqrt();
1495 __ lea(esp, Operand(esp, kDoubleSize));
1496 __ X87SetFPUCW(0x037F);
1497 break;
1498 }
1499 case kX87Float64Round: {
1500 RoundingMode mode =
1501 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1502 // Set the correct round mode in x87 control register
1503 __ X87SetRC((mode << 10));
1504
1505 if (!instr->InputAt(0)->IsFPRegister()) {
1506 InstructionOperand* input = instr->InputAt(0);
1507 USE(input);
1508 DCHECK(input->IsFPStackSlot());
1509 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1510 __ VerifyX87StackDepth(1);
1511 }
1512 __ fstp(0);
1513 __ fld_d(i.InputOperand(0));
1514 }
1515 __ frndint();
1516 __ X87SetRC(0x0000);
1517 break;
1518 }
1519 case kX87Float64Cmp: {
1520 __ fld_d(MemOperand(esp, kDoubleSize));
1521 __ fld_d(MemOperand(esp, 0));
1522 __ FCmp();
1523 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1524 break;
1525 }
1526 case kX87Movsxbl:
1527 __ movsx_b(i.OutputRegister(), i.MemoryOperand());
1528 break;
1529 case kX87Movzxbl:
1530 __ movzx_b(i.OutputRegister(), i.MemoryOperand());
1531 break;
1532 case kX87Movb: {
1533 size_t index = 0;
1534 Operand operand = i.MemoryOperand(&index);
1535 if (HasImmediateInput(instr, index)) {
1536 __ mov_b(operand, i.InputInt8(index));
1537 } else {
1538 __ mov_b(operand, i.InputRegister(index));
1539 }
1540 break;
1541 }
1542 case kX87Movsxwl:
1543 __ movsx_w(i.OutputRegister(), i.MemoryOperand());
1544 break;
1545 case kX87Movzxwl:
1546 __ movzx_w(i.OutputRegister(), i.MemoryOperand());
1547 break;
1548 case kX87Movw: {
1549 size_t index = 0;
1550 Operand operand = i.MemoryOperand(&index);
1551 if (HasImmediateInput(instr, index)) {
1552 __ mov_w(operand, i.InputInt16(index));
1553 } else {
1554 __ mov_w(operand, i.InputRegister(index));
1555 }
1556 break;
1557 }
1558 case kX87Movl:
1559 if (instr->HasOutput()) {
1560 __ mov(i.OutputRegister(), i.MemoryOperand());
1561 } else {
1562 size_t index = 0;
1563 Operand operand = i.MemoryOperand(&index);
1564 if (HasImmediateInput(instr, index)) {
1565 __ mov(operand, i.InputImmediate(index));
1566 } else {
1567 __ mov(operand, i.InputRegister(index));
1568 }
1569 }
1570 break;
1571 case kX87Movsd: {
1572 if (instr->HasOutput()) {
1573 X87Register output = i.OutputDoubleRegister();
1574 USE(output);
1575 DCHECK(output.code() == 0);
1576 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1577 __ VerifyX87StackDepth(1);
1578 }
1579 __ fstp(0);
1580 __ fld_d(i.MemoryOperand());
1581 } else {
1582 size_t index = 0;
1583 Operand operand = i.MemoryOperand(&index);
1584 __ fst_d(operand);
1585 }
1586 break;
1587 }
1588 case kX87Movss: {
1589 if (instr->HasOutput()) {
1590 X87Register output = i.OutputDoubleRegister();
1591 USE(output);
1592 DCHECK(output.code() == 0);
1593 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1594 __ VerifyX87StackDepth(1);
1595 }
1596 __ fstp(0);
1597 __ fld_s(i.MemoryOperand());
1598 } else {
1599 size_t index = 0;
1600 Operand operand = i.MemoryOperand(&index);
1601 __ fst_s(operand);
1602 }
1603 break;
1604 }
1605 case kX87BitcastFI: {
1606 __ mov(i.OutputRegister(), MemOperand(esp, 0));
1607 __ lea(esp, Operand(esp, kFloatSize));
1608 break;
1609 }
1610 case kX87BitcastIF: {
1611 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1612 __ VerifyX87StackDepth(1);
1613 }
1614 __ fstp(0);
1615 if (instr->InputAt(0)->IsRegister()) {
1616 __ lea(esp, Operand(esp, -kFloatSize));
1617 __ mov(MemOperand(esp, 0), i.InputRegister(0));
1618 __ fld_s(MemOperand(esp, 0));
1619 __ lea(esp, Operand(esp, kFloatSize));
1620 } else {
1621 __ fld_s(i.InputOperand(0));
1622 }
1623 break;
1624 }
1625 case kX87Lea: {
1626 AddressingMode mode = AddressingModeField::decode(instr->opcode());
1627 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
1628 // and addressing mode just happen to work out. The "addl"/"subl" forms
1629 // in these cases are faster based on measurements.
1630 if (mode == kMode_MI) {
1631 __ Move(i.OutputRegister(), Immediate(i.InputInt32(0)));
1632 } else if (i.InputRegister(0).is(i.OutputRegister())) {
1633 if (mode == kMode_MRI) {
1634 int32_t constant_summand = i.InputInt32(1);
1635 if (constant_summand > 0) {
1636 __ add(i.OutputRegister(), Immediate(constant_summand));
1637 } else if (constant_summand < 0) {
1638 __ sub(i.OutputRegister(), Immediate(-constant_summand));
1639 }
1640 } else if (mode == kMode_MR1) {
1641 if (i.InputRegister(1).is(i.OutputRegister())) {
1642 __ shl(i.OutputRegister(), 1);
1643 } else {
1644 __ lea(i.OutputRegister(), i.MemoryOperand());
1645 }
1646 } else if (mode == kMode_M2) {
1647 __ shl(i.OutputRegister(), 1);
1648 } else if (mode == kMode_M4) {
1649 __ shl(i.OutputRegister(), 2);
1650 } else if (mode == kMode_M8) {
1651 __ shl(i.OutputRegister(), 3);
1652 } else {
1653 __ lea(i.OutputRegister(), i.MemoryOperand());
1654 }
1655 } else {
1656 __ lea(i.OutputRegister(), i.MemoryOperand());
1657 }
1658 break;
1659 }
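// Editorial example (added): with kMode_M4 and the index register equal to
// the output, the lea above is strength-reduced to "shl reg, 2", and
// kMode_MRI with a constant summand becomes a plain add or sub, forms that
// the comment above notes are faster than the equivalent lea.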
1660 case kX87Push:
1661 if (instr->InputAt(0)->IsFPRegister()) {
1662 auto allocated = AllocatedOperand::cast(*instr->InputAt(0));
1663 if (allocated.representation() == MachineRepresentation::kFloat32) {
1664 __ sub(esp, Immediate(kDoubleSize));
1665 __ fst_s(Operand(esp, 0));
1666 } else {
1667 DCHECK(allocated.representation() == MachineRepresentation::kFloat64);
1668 __ sub(esp, Immediate(kDoubleSize));
1669 __ fst_d(Operand(esp, 0));
1670 }
1671 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1672 } else if (instr->InputAt(0)->IsFPStackSlot()) {
1673 auto allocated = AllocatedOperand::cast(*instr->InputAt(0));
1674 if (allocated.representation() == MachineRepresentation::kFloat32) {
1675 __ sub(esp, Immediate(kDoubleSize));
1676 __ fld_s(i.InputOperand(0));
1677 __ fstp_s(MemOperand(esp, 0));
1678 } else {
1679 DCHECK(allocated.representation() == MachineRepresentation::kFloat64);
1680 __ sub(esp, Immediate(kDoubleSize));
1681 __ fld_d(i.InputOperand(0));
1682 __ fstp_d(MemOperand(esp, 0));
1683 }
1684 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1685 } else if (HasImmediateInput(instr, 0)) {
1686 __ push(i.InputImmediate(0));
1687 frame_access_state()->IncreaseSPDelta(1);
1688 } else {
1689 __ push(i.InputOperand(0));
1690 frame_access_state()->IncreaseSPDelta(1);
1691 }
1692 break;
1693 case kX87Poke: {
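// Stores a value into an esp-relative slot that has already been reserved
// (the slot index comes from the MiscField), instead of pushing.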
1694 int const slot = MiscField::decode(instr->opcode());
1695 if (HasImmediateInput(instr, 0)) {
1696 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0));
1697 } else {
1698 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0));
1699 }
1700 break;
1701 }
Ben Murdochc5610432016-08-08 18:44:38 +01001702 case kX87Xchgb: {
1703 size_t index = 0;
1704 Operand operand = i.MemoryOperand(&index);
1705 __ xchg_b(i.InputRegister(index), operand);
1706 break;
1707 }
1708 case kX87Xchgw: {
1709 size_t index = 0;
1710 Operand operand = i.MemoryOperand(&index);
1711 __ xchg_w(i.InputRegister(index), operand);
1712 break;
1713 }
1714 case kX87Xchgl: {
1715 size_t index = 0;
1716 Operand operand = i.MemoryOperand(&index);
1717 __ xchg(i.InputRegister(index), operand);
1718 break;
1719 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001720 case kX87PushFloat32:
1721 __ lea(esp, Operand(esp, -kFloatSize));
Ben Murdochc5610432016-08-08 18:44:38 +01001722 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001723 __ fld_s(i.InputOperand(0));
1724 __ fstp_s(MemOperand(esp, 0));
Ben Murdochc5610432016-08-08 18:44:38 +01001725 } else if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001726 __ fst_s(MemOperand(esp, 0));
1727 } else {
1728 UNREACHABLE();
1729 }
1730 break;
1731 case kX87PushFloat64:
1732 __ lea(esp, Operand(esp, -kDoubleSize));
Ben Murdochc5610432016-08-08 18:44:38 +01001733 if (instr->InputAt(0)->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001734 __ fld_d(i.InputOperand(0));
1735 __ fstp_d(MemOperand(esp, 0));
Ben Murdochc5610432016-08-08 18:44:38 +01001736 } else if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001737 __ fst_d(MemOperand(esp, 0));
1738 } else {
1739 UNREACHABLE();
1740 }
1741 break;
1742 case kCheckedLoadInt8:
1743 ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_b);
1744 break;
1745 case kCheckedLoadUint8:
1746 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_b);
1747 break;
1748 case kCheckedLoadInt16:
1749 ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_w);
1750 break;
1751 case kCheckedLoadUint16:
1752 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_w);
1753 break;
1754 case kCheckedLoadWord32:
1755 ASSEMBLE_CHECKED_LOAD_INTEGER(mov);
1756 break;
1757 case kCheckedLoadFloat32:
1758 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_s);
1759 break;
1760 case kCheckedLoadFloat64:
1761 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_d);
1762 break;
1763 case kCheckedStoreWord8:
1764 ASSEMBLE_CHECKED_STORE_INTEGER(mov_b);
1765 break;
1766 case kCheckedStoreWord16:
1767 ASSEMBLE_CHECKED_STORE_INTEGER(mov_w);
1768 break;
1769 case kCheckedStoreWord32:
1770 ASSEMBLE_CHECKED_STORE_INTEGER(mov);
1771 break;
1772 case kCheckedStoreFloat32:
1773 ASSEMBLE_CHECKED_STORE_FLOAT(fst_s);
1774 break;
1775 case kCheckedStoreFloat64:
1776 ASSEMBLE_CHECKED_STORE_FLOAT(fst_d);
1777 break;
1778 case kX87StackCheck: {
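// Only the comparison against the stack limit is emitted here; the branch or
// deoptimization that consumes the flags is generated separately.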
1779 ExternalReference const stack_limit =
1780 ExternalReference::address_of_stack_limit(isolate());
1781 __ cmp(esp, Operand::StaticVariable(stack_limit));
1782 break;
1783 }
1784 case kCheckedLoadWord64:
1785 case kCheckedStoreWord64:
1786 UNREACHABLE(); // Checked int64 loads/stores are not currently supported.
1787 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001788 case kAtomicLoadInt8:
1789 case kAtomicLoadUint8:
1790 case kAtomicLoadInt16:
1791 case kAtomicLoadUint16:
1792 case kAtomicLoadWord32:
1793 case kAtomicStoreWord8:
1794 case kAtomicStoreWord16:
1795 case kAtomicStoreWord32:
1796 UNREACHABLE(); // Won't be generated by instruction selector.
1797 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001798 }
Ben Murdochc5610432016-08-08 18:44:38 +01001799 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001800} // NOLINT(readability/fn_size)
1801
1802
1803// Assembles a branch after an instruction.
1804void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
1805 X87OperandConverter i(this, instr);
1806 Label::Distance flabel_distance =
1807 branch->fallthru ? Label::kNear : Label::kFar;
Ben Murdochda12d292016-06-02 14:46:10 +01001808
1809 Label done;
1810 Label tlabel_tmp;
1811 Label flabel_tmp;
1812 Label* tlabel = &tlabel_tmp;
1813 Label* flabel = &flabel_tmp;
1814
1815 Label* tlabel_dst = branch->true_label;
1816 Label* flabel_dst = branch->false_label;
1817
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001818 switch (branch->condition) {
1819 case kUnorderedEqual:
1820 __ j(parity_even, flabel, flabel_distance);
1821 // Fall through.
1822 case kEqual:
1823 __ j(equal, tlabel);
1824 break;
1825 case kUnorderedNotEqual:
1826 __ j(parity_even, tlabel);
1827 // Fall through.
1828 case kNotEqual:
1829 __ j(not_equal, tlabel);
1830 break;
1831 case kSignedLessThan:
1832 __ j(less, tlabel);
1833 break;
1834 case kSignedGreaterThanOrEqual:
1835 __ j(greater_equal, tlabel);
1836 break;
1837 case kSignedLessThanOrEqual:
1838 __ j(less_equal, tlabel);
1839 break;
1840 case kSignedGreaterThan:
1841 __ j(greater, tlabel);
1842 break;
1843 case kUnsignedLessThan:
1844 __ j(below, tlabel);
1845 break;
1846 case kUnsignedGreaterThanOrEqual:
1847 __ j(above_equal, tlabel);
1848 break;
1849 case kUnsignedLessThanOrEqual:
1850 __ j(below_equal, tlabel);
1851 break;
1852 case kUnsignedGreaterThan:
1853 __ j(above, tlabel);
1854 break;
1855 case kOverflow:
1856 __ j(overflow, tlabel);
1857 break;
1858 case kNotOverflow:
1859 __ j(no_overflow, tlabel);
1860 break;
1861 default:
1862 UNREACHABLE();
1863 break;
1864 }
1865 // Add a jump if not falling through to the next block.
1866 if (!branch->fallthru) __ jmp(flabel);
Ben Murdochda12d292016-06-02 14:46:10 +01001867
1868 __ jmp(&done);
1869 __ bind(&tlabel_tmp);
1870 FlagsMode mode = FlagsModeField::decode(instr->opcode());
1871 if (mode == kFlags_deoptimize) {
1872 int double_register_param_count = 0;
1873 int x87_layout = 0;
1874 for (size_t i = 0; i < instr->InputCount(); i++) {
Ben Murdochc5610432016-08-08 18:44:38 +01001875 if (instr->InputAt(i)->IsFPRegister()) {
Ben Murdochda12d292016-06-02 14:46:10 +01001876 double_register_param_count++;
1877 }
1878 }
1879 // Currently we use only one X87 register. If double_register_param_count
1880 // is bigger than 1, it means a duplicated double register has been added
1881 // as an input to this instruction.
1882 if (double_register_param_count > 0) {
1883 x87_layout = (0 << 3) | 1;
1884 }
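// A sketch of the apparent encoding of x87_layout: the low three bits hold
// the number of values on the x87 stack (1 here), and the next three bits
// hold the index of the double register occupying that slot (0 here).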
1885 // The layout of the x87 register stack is loaded onto the top of the FPU
1886 // register stack for deoptimization.
1887 __ push(Immediate(x87_layout));
1888 __ fild_s(MemOperand(esp, 0));
1889 __ lea(esp, Operand(esp, kPointerSize));
1890 }
1891 __ jmp(tlabel_dst);
1892 __ bind(&flabel_tmp);
1893 __ jmp(flabel_dst);
1894 __ bind(&done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001895}
1896
1897
1898void CodeGenerator::AssembleArchJump(RpoNumber target) {
1899 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
1900}
1901
1902
1903// Assembles boolean materializations after an instruction.
1904void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1905 FlagsCondition condition) {
1906 X87OperandConverter i(this, instr);
1907 Label done;
1908
1909 // Materialize a full 32-bit 1 or 0 value. The result register is always the
1910 // last output of the instruction.
1911 Label check;
1912 DCHECK_NE(0u, instr->OutputCount());
1913 Register reg = i.OutputRegister(instr->OutputCount() - 1);
1914 Condition cc = no_condition;
1915 switch (condition) {
1916 case kUnorderedEqual:
1917 __ j(parity_odd, &check, Label::kNear);
1918 __ Move(reg, Immediate(0));
1919 __ jmp(&done, Label::kNear);
1920 // Fall through.
1921 case kEqual:
1922 cc = equal;
1923 break;
1924 case kUnorderedNotEqual:
1925 __ j(parity_odd, &check, Label::kNear);
1926 __ mov(reg, Immediate(1));
1927 __ jmp(&done, Label::kNear);
1928 // Fall through.
1929 case kNotEqual:
1930 cc = not_equal;
1931 break;
1932 case kSignedLessThan:
1933 cc = less;
1934 break;
1935 case kSignedGreaterThanOrEqual:
1936 cc = greater_equal;
1937 break;
1938 case kSignedLessThanOrEqual:
1939 cc = less_equal;
1940 break;
1941 case kSignedGreaterThan:
1942 cc = greater;
1943 break;
1944 case kUnsignedLessThan:
1945 cc = below;
1946 break;
1947 case kUnsignedGreaterThanOrEqual:
1948 cc = above_equal;
1949 break;
1950 case kUnsignedLessThanOrEqual:
1951 cc = below_equal;
1952 break;
1953 case kUnsignedGreaterThan:
1954 cc = above;
1955 break;
1956 case kOverflow:
1957 cc = overflow;
1958 break;
1959 case kNotOverflow:
1960 cc = no_overflow;
1961 break;
1962 default:
1963 UNREACHABLE();
1964 break;
1965 }
1966 __ bind(&check);
1967 if (reg.is_byte_register()) {
1968 // setcc for byte registers (al, bl, cl, dl).
1969 __ setcc(cc, reg);
1970 __ movzx_b(reg, reg);
1971 } else {
1972 // Emit a branch to set a register to either 1 or 0.
1973 Label set;
1974 __ j(cc, &set, Label::kNear);
1975 __ Move(reg, Immediate(0));
1976 __ jmp(&done, Label::kNear);
1977 __ bind(&set);
1978 __ mov(reg, Immediate(1));
1979 }
1980 __ bind(&done);
1981}
1982
1983
1984void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1985 X87OperandConverter i(this, instr);
1986 Register input = i.InputRegister(0);
1987 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1988 __ cmp(input, Immediate(i.InputInt32(index + 0)));
1989 __ j(equal, GetLabel(i.InputRpo(index + 1)));
1990 }
1991 AssembleArchJump(i.InputRpo(1));
1992}
1993
1994
1995void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1996 X87OperandConverter i(this, instr);
1997 Register input = i.InputRegister(0);
1998 size_t const case_count = instr->InputCount() - 2;
1999 Label** cases = zone()->NewArray<Label*>(case_count);
2000 for (size_t index = 0; index < case_count; ++index) {
2001 cases[index] = GetLabel(i.InputRpo(index + 2));
2002 }
2003 Label* const table = AddJumpTable(cases, case_count);
2004 __ cmp(input, Immediate(case_count));
2005 __ j(above_equal, GetLabel(i.InputRpo(1)));
2006 __ jmp(Operand::JumpTable(input, times_4, table));
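// The emitted sequence is roughly:
//   cmp input, case_count
//   jae <default block>
//   jmp [table + input * 4]
// with the table of label addresses emitted later by AssembleJumpTable().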
2007}
2008
Ben Murdochc5610432016-08-08 18:44:38 +01002009CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002010 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
2011 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
2012 isolate(), deoptimization_id, bailout_type);
Ben Murdochc5610432016-08-08 18:44:38 +01002013 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002014 __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochc5610432016-08-08 18:44:38 +01002015 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002016}
2017
2018
2019// The calling convention for JSFunctions on X87 passes arguments on the
2020// stack and the JSFunction and context in EDI and ESI, respectively, thus
2021// the steps of the call look as follows:
2022
2023// --{ before the call instruction }--------------------------------------------
2024// | caller frame |
2025// ^ esp ^ ebp
2026
2027// --{ push arguments and setup ESI, EDI }--------------------------------------
2028// | args + receiver | caller frame |
2029// ^ esp ^ ebp
2030// [edi = JSFunction, esi = context]
2031
2032// --{ call [edi + kCodeEntryOffset] }------------------------------------------
2033// | RET | args + receiver | caller frame |
2034// ^ esp ^ ebp
2035
2036// =={ prologue of called function }============================================
2037// --{ push ebp }---------------------------------------------------------------
2038// | FP | RET | args + receiver | caller frame |
2039// ^ esp ^ ebp
2040
2041// --{ mov ebp, esp }-----------------------------------------------------------
2042// | FP | RET | args + receiver | caller frame |
2043// ^ ebp,esp
2044
2045// --{ push esi }---------------------------------------------------------------
2046// | CTX | FP | RET | args + receiver | caller frame |
2047// ^esp ^ ebp
2048
2049// --{ push edi }---------------------------------------------------------------
2050// | FNC | CTX | FP | RET | args + receiver | caller frame |
2051// ^esp ^ ebp
2052
2053// --{ subi esp, #N }-----------------------------------------------------------
2054// | callee frame | FNC | CTX | FP | RET | args + receiver | caller frame |
2055// ^esp ^ ebp
2056
2057// =={ body of called function }================================================
2058
2059// =={ epilogue of called function }============================================
2060// --{ mov esp, ebp }-----------------------------------------------------------
2061// | FP | RET | args + receiver | caller frame |
2062// ^ esp,ebp
2063
2064// --{ pop ebp }-----------------------------------------------------------
2065// | | RET | args + receiver | caller frame |
2066// ^ esp ^ ebp
2067
2068// --{ ret #A+1 }-----------------------------------------------------------
2069// | | caller frame |
2070// ^ esp ^ ebp
2071
2072
2073// Runtime function calls are accomplished by doing a stub call to the
2074// CEntryStub (a real code object). On X87, arguments are passed on the
2075// stack, with the number of arguments in EAX, the address of the runtime
2076// function in EBX, and the context in ESI.
2077
2078// --{ before the call instruction }--------------------------------------------
2079// | caller frame |
2080// ^ esp ^ ebp
2081
2082// --{ push arguments and setup EAX, EBX, and ESI }-----------------------------
2083// | args + receiver | caller frame |
2084// ^ esp ^ ebp
2085// [eax = #args, ebx = runtime function, esi = context]
2086
2087// --{ call #CEntryStub }-------------------------------------------------------
2088// | RET | args + receiver | caller frame |
2089// ^ esp ^ ebp
2090
2091// =={ body of runtime function }===============================================
2092
2093// --{ runtime returns }--------------------------------------------------------
2094// | caller frame |
2095// ^ esp ^ ebp
2096
2097// Other custom linkages (e.g. for calling directly into and out of C++) may
2098// need to save callee-saved registers on the stack, which is done in the
2099// function prologue of generated code.
2100
2101// --{ before the call instruction }--------------------------------------------
2102// | caller frame |
2103// ^ esp ^ ebp
2104
2105// --{ set up arguments in registers on stack }---------------------------------
2106// | args | caller frame |
2107// ^ esp ^ ebp
2108// [r0 = arg0, r1 = arg1, ...]
2109
2110// --{ call code }--------------------------------------------------------------
2111// | RET | args | caller frame |
2112// ^ esp ^ ebp
2113
2114// =={ prologue of called function }============================================
2115// --{ push ebp }---------------------------------------------------------------
2116// | FP | RET | args | caller frame |
2117// ^ esp ^ ebp
2118
2119// --{ mov ebp, esp }-----------------------------------------------------------
2120// | FP | RET | args | caller frame |
2121// ^ ebp,esp
2122
2123// --{ save registers }---------------------------------------------------------
2124// | regs | FP | RET | args | caller frame |
2125// ^ esp ^ ebp
2126
2127// --{ subi esp, #N }-----------------------------------------------------------
2128// | callee frame | regs | FP | RET | args | caller frame |
2129// ^esp ^ ebp
2130
2131// =={ body of called function }================================================
2132
2133// =={ epilogue of called function }============================================
2134// --{ restore registers }------------------------------------------------------
2135// | regs | FP | RET | args | caller frame |
2136// ^ esp ^ ebp
2137
2138// --{ mov esp, ebp }-----------------------------------------------------------
2139// | FP | RET | args | caller frame |
2140// ^ esp,ebp
2141
2142// --{ pop ebp }----------------------------------------------------------------
2143// | RET | args | caller frame |
2144// ^ esp ^ ebp
2145
Ben Murdochc5610432016-08-08 18:44:38 +01002146void CodeGenerator::FinishFrame(Frame* frame) {
2147 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
2148 const RegList saves = descriptor->CalleeSavedRegisters();
2149 if (saves != 0) { // Save callee-saved registers.
2150 DCHECK(!info()->is_osr());
2151 int pushed = 0;
2152 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
2153 if (!((1 << i) & saves)) continue;
2154 ++pushed;
2155 }
2156 frame->AllocateSavedCalleeRegisterSlots(pushed);
2157 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002158
Ben Murdochc5610432016-08-08 18:44:38 +01002159 // Initialize FPU state.
2160 __ fninit();
2161 __ fld1();
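// fld1 seeds the stack with a single dummy value; the code generator appears
// to maintain the invariant of exactly one value on the x87 stack (see the
// VerifyX87StackDepth(1) checks).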
2162}
2163
2164void CodeGenerator::AssembleConstructFrame() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002165 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdochda12d292016-06-02 14:46:10 +01002166 if (frame_access_state()->has_frame()) {
2167 if (descriptor->IsCFunctionCall()) {
2168 __ push(ebp);
2169 __ mov(ebp, esp);
2170 } else if (descriptor->IsJSFunctionCall()) {
2171 __ Prologue(this->info()->GeneratePreagedPrologue());
2172 } else {
2173 __ StubPrologue(info()->GetOutputStackFrameType());
2174 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002175 }
Ben Murdochc5610432016-08-08 18:44:38 +01002176
2177 int shrink_slots = frame()->GetSpillSlotCount();
2178
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002179 if (info()->is_osr()) {
2180 // TurboFan OSR-compiled functions cannot be entered directly.
2181 __ Abort(kShouldNotDirectlyEnterOsrFunction);
2182
2183 // Unoptimized code jumps directly to this entrypoint while the unoptimized
2184 // frame is still on the stack. Optimized code uses OSR values directly from
2185 // the unoptimized frame. Thus, all that needs to be done is to allocate the
2186 // remaining stack slots.
2187 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
2188 osr_pc_offset_ = __ pc_offset();
Ben Murdochc5610432016-08-08 18:44:38 +01002189 shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
Ben Murdochda12d292016-06-02 14:46:10 +01002190
2191 // Initialize FPU state.
2192 __ fninit();
2193 __ fld1();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002194 }
2195
2196 const RegList saves = descriptor->CalleeSavedRegisters();
Ben Murdochc5610432016-08-08 18:44:38 +01002197 if (shrink_slots > 0) {
2198 __ sub(esp, Immediate(shrink_slots * kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002199 }
2200
2201 if (saves != 0) { // Save callee-saved registers.
2202 DCHECK(!info()->is_osr());
2203 int pushed = 0;
2204 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
2205 if (!((1 << i) & saves)) continue;
2206 __ push(Register::from_code(i));
2207 ++pushed;
2208 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002209 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002210}
2211
2212
2213void CodeGenerator::AssembleReturn() {
2214 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
2215
2216 // Clear the FPU stack only if there is no return value on the x87 stack.
2217 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
2218 __ VerifyX87StackDepth(1);
2219 }
2220 bool clear_stack = true;
2221 for (int i = 0; i < descriptor->ReturnCount(); i++) {
2222 MachineRepresentation rep = descriptor->GetReturnType(i).representation();
2223 LinkageLocation loc = descriptor->GetReturnLocation(i);
2224 if (IsFloatingPoint(rep) && loc == LinkageLocation::ForRegister(0)) {
2225 clear_stack = false;
2226 break;
2227 }
2228 }
2229 if (clear_stack) __ fstp(0);
2230
2231 int pop_count = static_cast<int>(descriptor->StackParameterCount());
2232 const RegList saves = descriptor->CalleeSavedRegisters();
2233 // Restore registers.
2234 if (saves != 0) {
2235 for (int i = 0; i < Register::kNumRegisters; i++) {
2236 if (!((1 << i) & saves)) continue;
2237 __ pop(Register::from_code(i));
2238 }
2239 }
2240
2241 if (descriptor->IsCFunctionCall()) {
Ben Murdochda12d292016-06-02 14:46:10 +01002242 AssembleDeconstructFrame();
2243 } else if (frame_access_state()->has_frame()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002244 // Canonicalize JSFunction return sites for now.
2245 if (return_label_.is_bound()) {
2246 __ jmp(&return_label_);
2247 return;
2248 } else {
2249 __ bind(&return_label_);
Ben Murdochda12d292016-06-02 14:46:10 +01002250 AssembleDeconstructFrame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 }
2252 }
2253 if (pop_count == 0) {
2254 __ ret(0);
2255 } else {
2256 __ Ret(pop_count * kPointerSize, ebx);
2257 }
2258}
2259
2260
2261void CodeGenerator::AssembleMove(InstructionOperand* source,
2262 InstructionOperand* destination) {
2263 X87OperandConverter g(this, nullptr);
2264 // Dispatch on the source and destination operand kinds. Not all
2265 // combinations are possible.
2266 if (source->IsRegister()) {
2267 DCHECK(destination->IsRegister() || destination->IsStackSlot());
2268 Register src = g.ToRegister(source);
2269 Operand dst = g.ToOperand(destination);
2270 __ mov(dst, src);
2271 } else if (source->IsStackSlot()) {
2272 DCHECK(destination->IsRegister() || destination->IsStackSlot());
2273 Operand src = g.ToOperand(source);
2274 if (destination->IsRegister()) {
2275 Register dst = g.ToRegister(destination);
2276 __ mov(dst, src);
2277 } else {
2278 Operand dst = g.ToOperand(destination);
2279 __ push(src);
2280 __ pop(dst);
2281 }
2282 } else if (source->IsConstant()) {
2283 Constant src_constant = g.ToConstant(source);
2284 if (src_constant.type() == Constant::kHeapObject) {
2285 Handle<HeapObject> src = src_constant.ToHeapObject();
Ben Murdochda12d292016-06-02 14:46:10 +01002286 int slot;
2287 if (IsMaterializableFromFrame(src, &slot)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002288 if (destination->IsRegister()) {
2289 Register dst = g.ToRegister(destination);
Ben Murdochda12d292016-06-02 14:46:10 +01002290 __ mov(dst, g.SlotToOperand(slot));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002291 } else {
2292 DCHECK(destination->IsStackSlot());
2293 Operand dst = g.ToOperand(destination);
Ben Murdochda12d292016-06-02 14:46:10 +01002294 __ push(g.SlotToOperand(slot));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002295 __ pop(dst);
2296 }
2297 } else if (destination->IsRegister()) {
2298 Register dst = g.ToRegister(destination);
2299 __ LoadHeapObject(dst, src);
2300 } else {
2301 DCHECK(destination->IsStackSlot());
2302 Operand dst = g.ToOperand(destination);
2303 AllowDeferredHandleDereference embedding_raw_address;
2304 if (isolate()->heap()->InNewSpace(*src)) {
2305 __ PushHeapObject(src);
2306 __ pop(dst);
2307 } else {
2308 __ mov(dst, src);
2309 }
2310 }
2311 } else if (destination->IsRegister()) {
2312 Register dst = g.ToRegister(destination);
2313 __ Move(dst, g.ToImmediate(source));
2314 } else if (destination->IsStackSlot()) {
2315 Operand dst = g.ToOperand(destination);
2316 __ Move(dst, g.ToImmediate(source));
2317 } else if (src_constant.type() == Constant::kFloat32) {
2318 // TODO(turbofan): Can we do better here?
2319 uint32_t src = bit_cast<uint32_t>(src_constant.ToFloat32());
Ben Murdochc5610432016-08-08 18:44:38 +01002320 if (destination->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002321 __ sub(esp, Immediate(kInt32Size));
2322 __ mov(MemOperand(esp, 0), Immediate(src));
2323 // Only ever push one value onto the x87 stack.
2324 __ fstp(0);
2325 __ fld_s(MemOperand(esp, 0));
2326 __ add(esp, Immediate(kInt32Size));
2327 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01002328 DCHECK(destination->IsFPStackSlot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002329 Operand dst = g.ToOperand(destination);
2330 __ Move(dst, Immediate(src));
2331 }
2332 } else {
2333 DCHECK_EQ(Constant::kFloat64, src_constant.type());
2334 uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
2335 uint32_t lower = static_cast<uint32_t>(src);
2336 uint32_t upper = static_cast<uint32_t>(src >> 32);
Ben Murdochc5610432016-08-08 18:44:38 +01002337 if (destination->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002338 __ sub(esp, Immediate(kDoubleSize));
2339 __ mov(MemOperand(esp, 0), Immediate(lower));
2340 __ mov(MemOperand(esp, kInt32Size), Immediate(upper));
2341 // Only ever push one value onto the x87 stack.
2342 __ fstp(0);
2343 __ fld_d(MemOperand(esp, 0));
2344 __ add(esp, Immediate(kDoubleSize));
2345 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01002346 DCHECK(destination->IsFPStackSlot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002347 Operand dst0 = g.ToOperand(destination);
2348 Operand dst1 = g.HighOperand(destination);
2349 __ Move(dst0, Immediate(lower));
2350 __ Move(dst1, Immediate(upper));
2351 }
2352 }
Ben Murdochc5610432016-08-08 18:44:38 +01002353 } else if (source->IsFPRegister()) {
2354 DCHECK(destination->IsFPStackSlot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002355 Operand dst = g.ToOperand(destination);
2356 auto allocated = AllocatedOperand::cast(*source);
2357 switch (allocated.representation()) {
2358 case MachineRepresentation::kFloat32:
2359 __ fst_s(dst);
2360 break;
2361 case MachineRepresentation::kFloat64:
2362 __ fst_d(dst);
2363 break;
2364 default:
2365 UNREACHABLE();
2366 }
Ben Murdochc5610432016-08-08 18:44:38 +01002367 } else if (source->IsFPStackSlot()) {
2368 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002369 Operand src = g.ToOperand(source);
2370 auto allocated = AllocatedOperand::cast(*source);
Ben Murdochc5610432016-08-08 18:44:38 +01002371 if (destination->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002372 // Only ever push one value onto the x87 stack.
2373 __ fstp(0);
2374 switch (allocated.representation()) {
2375 case MachineRepresentation::kFloat32:
2376 __ fld_s(src);
2377 break;
2378 case MachineRepresentation::kFloat64:
2379 __ fld_d(src);
2380 break;
2381 default:
2382 UNREACHABLE();
2383 }
2384 } else {
2385 Operand dst = g.ToOperand(destination);
2386 switch (allocated.representation()) {
2387 case MachineRepresentation::kFloat32:
2388 __ fld_s(src);
2389 __ fstp_s(dst);
2390 break;
2391 case MachineRepresentation::kFloat64:
2392 __ fld_d(src);
2393 __ fstp_d(dst);
2394 break;
2395 default:
2396 UNREACHABLE();
2397 }
2398 }
2399 } else {
2400 UNREACHABLE();
2401 }
2402}
2403
2404
2405void CodeGenerator::AssembleSwap(InstructionOperand* source,
2406 InstructionOperand* destination) {
2407 X87OperandConverter g(this, nullptr);
2408 // Dispatch on the source and destination operand kinds. Not all
2409 // combinations are possible.
2410 if (source->IsRegister() && destination->IsRegister()) {
2411 // Register-register.
2412 Register src = g.ToRegister(source);
2413 Register dst = g.ToRegister(destination);
2414 __ xchg(dst, src);
2415 } else if (source->IsRegister() && destination->IsStackSlot()) {
2416 // Register-memory.
2417 __ xchg(g.ToRegister(source), g.ToOperand(destination));
2418 } else if (source->IsStackSlot() && destination->IsStackSlot()) {
2419 // Memory-memory.
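// Swap two stack slots without a scratch register: push both values and pop
// them back in swapped order. The SP-delta bookkeeping keeps frame offsets
// computed via the frame access state correct while esp is temporarily moved.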
2420 Operand dst1 = g.ToOperand(destination);
2421 __ push(dst1);
2422 frame_access_state()->IncreaseSPDelta(1);
2423 Operand src1 = g.ToOperand(source);
2424 __ push(src1);
2425 Operand dst2 = g.ToOperand(destination);
2426 __ pop(dst2);
2427 frame_access_state()->IncreaseSPDelta(-1);
2428 Operand src2 = g.ToOperand(source);
2429 __ pop(src2);
Ben Murdochc5610432016-08-08 18:44:38 +01002430 } else if (source->IsFPRegister() && destination->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002431 UNREACHABLE();
Ben Murdochc5610432016-08-08 18:44:38 +01002432 } else if (source->IsFPRegister() && destination->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433 auto allocated = AllocatedOperand::cast(*source);
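// The FP value lives in ST(0). Load the destination slot on top of it,
// exchange the two stack entries, then store-and-pop the old ST(0) back into
// the destination slot, completing the swap.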
2434 switch (allocated.representation()) {
2435 case MachineRepresentation::kFloat32:
2436 __ fld_s(g.ToOperand(destination));
2437 __ fxch();
2438 __ fstp_s(g.ToOperand(destination));
2439 break;
2440 case MachineRepresentation::kFloat64:
2441 __ fld_d(g.ToOperand(destination));
2442 __ fxch();
2443 __ fstp_d(g.ToOperand(destination));
2444 break;
2445 default:
2446 UNREACHABLE();
2447 }
Ben Murdochc5610432016-08-08 18:44:38 +01002448 } else if (source->IsFPStackSlot() && destination->IsFPStackSlot()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002449 auto allocated = AllocatedOperand::cast(*source);
2450 switch (allocated.representation()) {
2451 case MachineRepresentation::kFloat32:
2452 __ fld_s(g.ToOperand(source));
2453 __ fld_s(g.ToOperand(destination));
2454 __ fstp_s(g.ToOperand(source));
2455 __ fstp_s(g.ToOperand(destination));
2456 break;
2457 case MachineRepresentation::kFloat64:
2458 __ fld_d(g.ToOperand(source));
2459 __ fld_d(g.ToOperand(destination));
2460 __ fstp_d(g.ToOperand(source));
2461 __ fstp_d(g.ToOperand(destination));
2462 break;
2463 default:
2464 UNREACHABLE();
2465 }
2466 } else {
2467 // No other combinations are possible.
2468 UNREACHABLE();
2469 }
2470}
2471
2472
2473void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2474 for (size_t index = 0; index < target_count; ++index) {
2475 __ dd(targets[index]);
2476 }
2477}
2478
2479
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002480void CodeGenerator::EnsureSpaceForLazyDeopt() {
2481 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2482 return;
2483 }
2484
2485 int space_needed = Deoptimizer::patch_size();
2486 // Ensure that we have enough space after the previous lazy-bailout
2487 // instruction for patching the code here.
2488 int current_pc = masm()->pc_offset();
2489 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2490 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2491 __ Nop(padding_size);
2492 }
2493}
2494
2495#undef __
2496
2497} // namespace compiler
2498} // namespace internal
2499} // namespace v8