blob: da7fdb481be90c60b97d29716cf96a428593b1ff [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/ast/scopes.h"
8#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
11#include "src/compiler/osr.h"
Ben Murdoch097c5b22016-05-18 11:27:45 +010012#include "src/frames.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/x87/assembler-x87.h"
14#include "src/x87/frames-x87.h"
15#include "src/x87/macro-assembler-x87.h"
16
17namespace v8 {
18namespace internal {
19namespace compiler {
20
21#define __ masm()->
22
23
24// Adds X87 specific methods for decoding operands.
25class X87OperandConverter : public InstructionOperandConverter {
26 public:
27 X87OperandConverter(CodeGenerator* gen, Instruction* instr)
28 : InstructionOperandConverter(gen, instr) {}
29
30 Operand InputOperand(size_t index, int extra = 0) {
31 return ToOperand(instr_->InputAt(index), extra);
32 }
33
34 Immediate InputImmediate(size_t index) {
35 return ToImmediate(instr_->InputAt(index));
36 }
37
38 Operand OutputOperand() { return ToOperand(instr_->Output()); }
39
40 Operand ToOperand(InstructionOperand* op, int extra = 0) {
41 if (op->IsRegister()) {
42 DCHECK(extra == 0);
43 return Operand(ToRegister(op));
44 }
45 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
Ben Murdochda12d292016-06-02 14:46:10 +010046 return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000047 }
48
Ben Murdochda12d292016-06-02 14:46:10 +010049 Operand SlotToOperand(int slot, int extra = 0) {
50 FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
51 return Operand(offset.from_stack_pointer() ? esp : ebp,
52 offset.offset() + extra);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000053 }
54
55 Operand HighOperand(InstructionOperand* op) {
56 DCHECK(op->IsDoubleStackSlot());
57 return ToOperand(op, kPointerSize);
58 }
59
60 Immediate ToImmediate(InstructionOperand* operand) {
61 Constant constant = ToConstant(operand);
62 switch (constant.type()) {
63 case Constant::kInt32:
64 return Immediate(constant.ToInt32());
65 case Constant::kFloat32:
66 return Immediate(
67 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
68 case Constant::kFloat64:
69 return Immediate(
70 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
71 case Constant::kExternalReference:
72 return Immediate(constant.ToExternalReference());
73 case Constant::kHeapObject:
74 return Immediate(constant.ToHeapObject());
75 case Constant::kInt64:
76 break;
77 case Constant::kRpoNumber:
78 return Immediate::CodeRelativeOffset(ToLabel(operand));
79 }
80 UNREACHABLE();
81 return Immediate(-1);
82 }
83
84 static size_t NextOffset(size_t* offset) {
85 size_t i = *offset;
86 (*offset)++;
87 return i;
88 }
89
90 static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
91 STATIC_ASSERT(0 == static_cast<int>(times_1));
92 STATIC_ASSERT(1 == static_cast<int>(times_2));
93 STATIC_ASSERT(2 == static_cast<int>(times_4));
94 STATIC_ASSERT(3 == static_cast<int>(times_8));
95 int scale = static_cast<int>(mode - one);
96 DCHECK(scale >= 0 && scale < 4);
97 return static_cast<ScaleFactor>(scale);
98 }
99
100 Operand MemoryOperand(size_t* offset) {
101 AddressingMode mode = AddressingModeField::decode(instr_->opcode());
102 switch (mode) {
103 case kMode_MR: {
104 Register base = InputRegister(NextOffset(offset));
105 int32_t disp = 0;
106 return Operand(base, disp);
107 }
108 case kMode_MRI: {
109 Register base = InputRegister(NextOffset(offset));
110 int32_t disp = InputInt32(NextOffset(offset));
111 return Operand(base, disp);
112 }
113 case kMode_MR1:
114 case kMode_MR2:
115 case kMode_MR4:
116 case kMode_MR8: {
117 Register base = InputRegister(NextOffset(offset));
118 Register index = InputRegister(NextOffset(offset));
119 ScaleFactor scale = ScaleFor(kMode_MR1, mode);
120 int32_t disp = 0;
121 return Operand(base, index, scale, disp);
122 }
123 case kMode_MR1I:
124 case kMode_MR2I:
125 case kMode_MR4I:
126 case kMode_MR8I: {
127 Register base = InputRegister(NextOffset(offset));
128 Register index = InputRegister(NextOffset(offset));
129 ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
130 int32_t disp = InputInt32(NextOffset(offset));
131 return Operand(base, index, scale, disp);
132 }
133 case kMode_M1:
134 case kMode_M2:
135 case kMode_M4:
136 case kMode_M8: {
137 Register index = InputRegister(NextOffset(offset));
138 ScaleFactor scale = ScaleFor(kMode_M1, mode);
139 int32_t disp = 0;
140 return Operand(index, scale, disp);
141 }
142 case kMode_M1I:
143 case kMode_M2I:
144 case kMode_M4I:
145 case kMode_M8I: {
146 Register index = InputRegister(NextOffset(offset));
147 ScaleFactor scale = ScaleFor(kMode_M1I, mode);
148 int32_t disp = InputInt32(NextOffset(offset));
149 return Operand(index, scale, disp);
150 }
151 case kMode_MI: {
152 int32_t disp = InputInt32(NextOffset(offset));
153 return Operand(Immediate(disp));
154 }
155 case kMode_None:
156 UNREACHABLE();
157 return Operand(no_reg, 0);
158 }
159 UNREACHABLE();
160 return Operand(no_reg, 0);
161 }
162
163 Operand MemoryOperand(size_t first_input = 0) {
164 return MemoryOperand(&first_input);
165 }
166};
167
168
169namespace {
170
171bool HasImmediateInput(Instruction* instr, size_t index) {
172 return instr->InputAt(index)->IsImmediate();
173}
174
175
176class OutOfLineLoadInteger final : public OutOfLineCode {
177 public:
178 OutOfLineLoadInteger(CodeGenerator* gen, Register result)
179 : OutOfLineCode(gen), result_(result) {}
180
181 void Generate() final { __ xor_(result_, result_); }
182
183 private:
184 Register const result_;
185};
186
187
188class OutOfLineLoadFloat final : public OutOfLineCode {
189 public:
190 OutOfLineLoadFloat(CodeGenerator* gen, X87Register result)
191 : OutOfLineCode(gen), result_(result) {}
192
193 void Generate() final {
194 DCHECK(result_.code() == 0);
195 USE(result_);
196 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
197 __ VerifyX87StackDepth(1);
198 }
199 __ fstp(0);
200 __ push(Immediate(0xffffffff));
201 __ push(Immediate(0x7fffffff));
202 __ fld_d(MemOperand(esp, 0));
203 __ lea(esp, Operand(esp, kDoubleSize));
204 }
205
206 private:
207 X87Register const result_;
208};
209
210
211class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
212 public:
213 OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
214 X87Register input)
215 : OutOfLineCode(gen), result_(result), input_(input) {}
216
217 void Generate() final {
218 UNIMPLEMENTED();
219 USE(result_);
220 USE(input_);
221 }
222
223 private:
224 Register const result_;
225 X87Register const input_;
226};
227
228
229class OutOfLineRecordWrite final : public OutOfLineCode {
230 public:
231 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
232 Register value, Register scratch0, Register scratch1,
233 RecordWriteMode mode)
234 : OutOfLineCode(gen),
235 object_(object),
236 operand_(operand),
237 value_(value),
238 scratch0_(scratch0),
239 scratch1_(scratch1),
240 mode_(mode) {}
241
242 void Generate() final {
243 if (mode_ > RecordWriteMode::kValueIsPointer) {
244 __ JumpIfSmi(value_, exit());
245 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100246 __ CheckPageFlag(value_, scratch0_,
247 MemoryChunk::kPointersToHereAreInterestingMask, zero,
248 exit());
249 RememberedSetAction const remembered_set_action =
250 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
251 : OMIT_REMEMBERED_SET;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000252 SaveFPRegsMode const save_fp_mode =
253 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
254 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100255 remembered_set_action, save_fp_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000256 __ lea(scratch1_, operand_);
257 __ CallStub(&stub);
258 }
259
260 private:
261 Register const object_;
262 Operand const operand_;
263 Register const value_;
264 Register const scratch0_;
265 Register const scratch1_;
266 RecordWriteMode const mode_;
267};
268
269} // namespace
270
271
// Bounds-checked float load: compares the offset against the length input
// and branches to OutOfLineLoadFloat (which produces a NaN) when OOB.
// The loaded value replaces st(0).
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr)                          \
  do {                                                                  \
    auto result = i.OutputDoubleRegister();                             \
    auto offset = i.InputRegister(0);                                   \
    DCHECK(result.code() == 0);                                         \
    if (instr->InputAt(1)->IsRegister()) {                              \
      __ cmp(offset, i.InputRegister(1));                               \
    } else {                                                            \
      __ cmp(offset, i.InputImmediate(1));                              \
    }                                                                   \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadFloat(this, result); \
    __ j(above_equal, ool->entry());                                    \
    __ fstp(0);                                                         \
    __ asm_instr(i.MemoryOperand(2));                                   \
    __ bind(ool->exit());                                               \
  } while (false)
288
289
// Bounds-checked integer load: compares the offset against the length input
// and branches to OutOfLineLoadInteger (which zeroes the result) when OOB.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                          \
  do {                                                                    \
    auto result = i.OutputRegister();                                     \
    auto offset = i.InputRegister(0);                                     \
    if (instr->InputAt(1)->IsRegister()) {                                \
      __ cmp(offset, i.InputRegister(1));                                 \
    } else {                                                              \
      __ cmp(offset, i.InputImmediate(1));                                \
    }                                                                     \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadInteger(this, result); \
    __ j(above_equal, ool->entry());                                      \
    __ asm_instr(result, i.MemoryOperand(2));                             \
    __ bind(ool->exit());                                                 \
  } while (false)
304
305
// Bounds-checked float store: silently skips the store when the offset is
// out of bounds.  The value to store is expected in st(0) (register 2 must
// decode to the top of the x87 stack).
#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr)  \
  do {                                           \
    auto offset = i.InputRegister(0);            \
    if (instr->InputAt(1)->IsRegister()) {       \
      __ cmp(offset, i.InputRegister(1));        \
    } else {                                     \
      __ cmp(offset, i.InputImmediate(1));       \
    }                                            \
    Label done;                                  \
    DCHECK(i.InputDoubleRegister(2).code() == 0); \
    __ j(above_equal, &done, Label::kNear);      \
    __ asm_instr(i.MemoryOperand(3));            \
    __ bind(&done);                              \
  } while (false)
320
321
// Bounds-checked integer store: silently skips the store when the offset is
// out of bounds.  The stored value (input 2) may be a register or immediate.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)           \
  do {                                                      \
    auto offset = i.InputRegister(0);                       \
    if (instr->InputAt(1)->IsRegister()) {                  \
      __ cmp(offset, i.InputRegister(1));                   \
    } else {                                                \
      __ cmp(offset, i.InputImmediate(1));                  \
    }                                                       \
    Label done;                                             \
    __ j(above_equal, &done, Label::kNear);                 \
    if (instr->InputAt(2)->IsRegister()) {                  \
      __ asm_instr(i.MemoryOperand(3), i.InputRegister(2)); \
    } else {                                                \
      __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \
    }                                                       \
    __ bind(&done);                                         \
  } while (false)
339
// Emits a compare/test with full operand flexibility: a decoded memory
// operand on the left when the opcode carries an addressing mode, otherwise
// register/operand vs. register/immediate combinations of inputs 0 and 1.
#define ASSEMBLE_COMPARE(asm_instr)                                   \
  do {                                                                \
    if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
      size_t index = 0;                                               \
      Operand left = i.MemoryOperand(&index);                         \
      if (HasImmediateInput(instr, index)) {                          \
        __ asm_instr(left, i.InputImmediate(index));                  \
      } else {                                                        \
        __ asm_instr(left, i.InputRegister(index));                   \
      }                                                               \
    } else {                                                          \
      if (HasImmediateInput(instr, 1)) {                              \
        if (instr->InputAt(0)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputImmediate(1));      \
        } else {                                                      \
          __ asm_instr(i.InputOperand(0), i.InputImmediate(1));       \
        }                                                             \
      } else {                                                        \
        if (instr->InputAt(1)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputRegister(1));       \
        } else {                                                      \
          __ asm_instr(i.InputRegister(0), i.InputOperand(1));        \
        }                                                             \
      }                                                               \
    }                                                                 \
  } while (0)
366
367void CodeGenerator::AssembleDeconstructFrame() {
368 __ mov(esp, ebp);
369 __ pop(ebp);
370}
371
372// For insert fninit/fld1 instructions after the Prologue
373thread_local bool is_block_0 = false;
374
375void CodeGenerator::AssembleSetupStackPointer() { is_block_0 = true; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000376
377void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
378 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
379 if (sp_slot_delta > 0) {
380 __ add(esp, Immediate(sp_slot_delta * kPointerSize));
381 }
382 frame_access_state()->SetFrameAccessToDefault();
383}
384
385
386void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
387 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
388 if (sp_slot_delta < 0) {
389 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize));
390 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
391 }
Ben Murdochda12d292016-06-02 14:46:10 +0100392 if (frame_access_state()->has_frame()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000393 __ mov(ebp, MemOperand(ebp, 0));
394 }
395 frame_access_state()->SetFrameAccessToSP();
396}
397
Ben Murdochda12d292016-06-02 14:46:10 +0100398void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
399 Register, Register,
400 Register) {
401 // There are not enough temp registers left on ia32 for a call instruction
402 // so we pick some scratch registers and save/restore them manually here.
403 int scratch_count = 3;
404 Register scratch1 = ebx;
405 Register scratch2 = ecx;
406 Register scratch3 = edx;
407 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
408 Label done;
409
410 // Check if current frame is an arguments adaptor frame.
411 __ cmp(Operand(ebp, StandardFrameConstants::kContextOffset),
412 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
413 __ j(not_equal, &done, Label::kNear);
414
415 __ push(scratch1);
416 __ push(scratch2);
417 __ push(scratch3);
418
419 // Load arguments count from current arguments adaptor frame (note, it
420 // does not include receiver).
421 Register caller_args_count_reg = scratch1;
422 __ mov(caller_args_count_reg,
423 Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
424 __ SmiUntag(caller_args_count_reg);
425
426 ParameterCount callee_args_count(args_reg);
427 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
428 scratch3, ReturnAddressState::kOnStack, scratch_count);
429 __ pop(scratch3);
430 __ pop(scratch2);
431 __ pop(scratch1);
432
433 __ bind(&done);
434}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000435
436// Assembles an instruction after register allocation, producing machine code.
437void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
438 X87OperandConverter i(this, instr);
Ben Murdochda12d292016-06-02 14:46:10 +0100439 InstructionCode opcode = instr->opcode();
440 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000441
Ben Murdochda12d292016-06-02 14:46:10 +0100442 // Workaround for CL #35139 (https://codereview.chromium.org/1775323002)
443 if (is_block_0) {
444 __ fninit();
445 __ fld1();
446 is_block_0 = false;
447 }
448
449 switch (arch_opcode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000450 case kArchCallCodeObject: {
451 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
452 __ VerifyX87StackDepth(1);
453 }
454 __ fstp(0);
455 EnsureSpaceForLazyDeopt();
456 if (HasImmediateInput(instr, 0)) {
457 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
458 __ call(code, RelocInfo::CODE_TARGET);
459 } else {
460 Register reg = i.InputRegister(0);
461 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
462 __ call(reg);
463 }
464 RecordCallPosition(instr);
465 bool double_result =
466 instr->HasOutput() && instr->Output()->IsDoubleRegister();
467 if (double_result) {
468 __ lea(esp, Operand(esp, -kDoubleSize));
469 __ fstp_d(Operand(esp, 0));
470 }
471 __ fninit();
472 if (double_result) {
473 __ fld_d(Operand(esp, 0));
474 __ lea(esp, Operand(esp, kDoubleSize));
475 } else {
476 __ fld1();
477 }
478 frame_access_state()->ClearSPDelta();
479 break;
480 }
Ben Murdochda12d292016-06-02 14:46:10 +0100481 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000482 case kArchTailCallCodeObject: {
483 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
484 __ VerifyX87StackDepth(1);
485 }
486 __ fstp(0);
487 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
488 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100489 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
490 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
491 no_reg, no_reg, no_reg);
492 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000493 if (HasImmediateInput(instr, 0)) {
494 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
495 __ jmp(code, RelocInfo::CODE_TARGET);
496 } else {
497 Register reg = i.InputRegister(0);
498 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
499 __ jmp(reg);
500 }
501 frame_access_state()->ClearSPDelta();
502 break;
503 }
504 case kArchCallJSFunction: {
505 EnsureSpaceForLazyDeopt();
506 Register func = i.InputRegister(0);
507 if (FLAG_debug_code) {
508 // Check the function's context matches the context argument.
509 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
510 __ Assert(equal, kWrongFunctionContext);
511 }
512 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
513 __ VerifyX87StackDepth(1);
514 }
515 __ fstp(0);
516 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset));
517 RecordCallPosition(instr);
518 bool double_result =
519 instr->HasOutput() && instr->Output()->IsDoubleRegister();
520 if (double_result) {
521 __ lea(esp, Operand(esp, -kDoubleSize));
522 __ fstp_d(Operand(esp, 0));
523 }
524 __ fninit();
525 if (double_result) {
526 __ fld_d(Operand(esp, 0));
527 __ lea(esp, Operand(esp, kDoubleSize));
528 } else {
529 __ fld1();
530 }
531 frame_access_state()->ClearSPDelta();
532 break;
533 }
Ben Murdochda12d292016-06-02 14:46:10 +0100534 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000535 case kArchTailCallJSFunction: {
536 Register func = i.InputRegister(0);
537 if (FLAG_debug_code) {
538 // Check the function's context matches the context argument.
539 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
540 __ Assert(equal, kWrongFunctionContext);
541 }
542 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
543 __ VerifyX87StackDepth(1);
544 }
545 __ fstp(0);
546 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
547 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100548 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
549 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
550 no_reg, no_reg, no_reg);
551 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000552 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
553 frame_access_state()->ClearSPDelta();
554 break;
555 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000556 case kArchPrepareCallCFunction: {
557 // Frame alignment requires using FP-relative frame addressing.
558 frame_access_state()->SetFrameAccessToFP();
559 int const num_parameters = MiscField::decode(instr->opcode());
560 __ PrepareCallCFunction(num_parameters, i.TempRegister(0));
561 break;
562 }
563 case kArchPrepareTailCall:
564 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
565 break;
566 case kArchCallCFunction: {
567 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
568 __ VerifyX87StackDepth(1);
569 }
570 __ fstp(0);
571 int const num_parameters = MiscField::decode(instr->opcode());
572 if (HasImmediateInput(instr, 0)) {
573 ExternalReference ref = i.InputExternalReference(0);
574 __ CallCFunction(ref, num_parameters);
575 } else {
576 Register func = i.InputRegister(0);
577 __ CallCFunction(func, num_parameters);
578 }
579 bool double_result =
580 instr->HasOutput() && instr->Output()->IsDoubleRegister();
581 if (double_result) {
582 __ lea(esp, Operand(esp, -kDoubleSize));
583 __ fstp_d(Operand(esp, 0));
584 }
585 __ fninit();
586 if (double_result) {
587 __ fld_d(Operand(esp, 0));
588 __ lea(esp, Operand(esp, kDoubleSize));
589 } else {
590 __ fld1();
591 }
592 frame_access_state()->SetFrameAccessToDefault();
593 frame_access_state()->ClearSPDelta();
594 break;
595 }
596 case kArchJmp:
597 AssembleArchJump(i.InputRpo(0));
598 break;
599 case kArchLookupSwitch:
600 AssembleArchLookupSwitch(instr);
601 break;
602 case kArchTableSwitch:
603 AssembleArchTableSwitch(instr);
604 break;
605 case kArchNop:
606 case kArchThrowTerminator:
607 // don't emit code for nops.
608 break;
609 case kArchDeoptimize: {
610 int deopt_state_id =
611 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
612 int double_register_param_count = 0;
613 int x87_layout = 0;
614 for (size_t i = 0; i < instr->InputCount(); i++) {
615 if (instr->InputAt(i)->IsDoubleRegister()) {
616 double_register_param_count++;
617 }
618 }
619 // Currently we use only one X87 register. If double_register_param_count
620 // is bigger than 1, it means duplicated double register is added to input
621 // of this instruction.
622 if (double_register_param_count > 0) {
623 x87_layout = (0 << 3) | 1;
624 }
625 // The layout of x87 register stack is loaded on the top of FPU register
626 // stack for deoptimization.
627 __ push(Immediate(x87_layout));
628 __ fild_s(MemOperand(esp, 0));
629 __ lea(esp, Operand(esp, kPointerSize));
630
631 Deoptimizer::BailoutType bailout_type =
632 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
633 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
634 break;
635 }
636 case kArchRet:
637 AssembleReturn();
638 break;
639 case kArchFramePointer:
640 __ mov(i.OutputRegister(), ebp);
641 break;
642 case kArchStackPointer:
643 __ mov(i.OutputRegister(), esp);
644 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100645 case kArchParentFramePointer:
Ben Murdochda12d292016-06-02 14:46:10 +0100646 if (frame_access_state()->has_frame()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100647 __ mov(i.OutputRegister(), Operand(ebp, 0));
648 } else {
649 __ mov(i.OutputRegister(), ebp);
650 }
651 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000652 case kArchTruncateDoubleToI: {
653 if (!instr->InputAt(0)->IsDoubleRegister()) {
654 __ fld_d(i.InputOperand(0));
655 }
656 __ TruncateX87TOSToI(i.OutputRegister());
657 if (!instr->InputAt(0)->IsDoubleRegister()) {
658 __ fstp(0);
659 }
660 break;
661 }
662 case kArchStoreWithWriteBarrier: {
663 RecordWriteMode mode =
664 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
665 Register object = i.InputRegister(0);
666 size_t index = 0;
667 Operand operand = i.MemoryOperand(&index);
668 Register value = i.InputRegister(index);
669 Register scratch0 = i.TempRegister(0);
670 Register scratch1 = i.TempRegister(1);
671 auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value,
672 scratch0, scratch1, mode);
673 __ mov(operand, value);
674 __ CheckPageFlag(object, scratch0,
675 MemoryChunk::kPointersFromHereAreInterestingMask,
676 not_zero, ool->entry());
677 __ bind(ool->exit());
678 break;
679 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100680 case kArchStackSlot: {
681 FrameOffset offset =
682 frame_access_state()->GetFrameOffset(i.InputInt32(0));
683 Register base;
684 if (offset.from_stack_pointer()) {
685 base = esp;
686 } else {
687 base = ebp;
688 }
689 __ lea(i.OutputRegister(), Operand(base, offset.offset()));
690 break;
691 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000692 case kX87Add:
693 if (HasImmediateInput(instr, 1)) {
694 __ add(i.InputOperand(0), i.InputImmediate(1));
695 } else {
696 __ add(i.InputRegister(0), i.InputOperand(1));
697 }
698 break;
699 case kX87And:
700 if (HasImmediateInput(instr, 1)) {
701 __ and_(i.InputOperand(0), i.InputImmediate(1));
702 } else {
703 __ and_(i.InputRegister(0), i.InputOperand(1));
704 }
705 break;
706 case kX87Cmp:
Ben Murdochda12d292016-06-02 14:46:10 +0100707 ASSEMBLE_COMPARE(cmp);
708 break;
709 case kX87Cmp16:
710 ASSEMBLE_COMPARE(cmpw);
711 break;
712 case kX87Cmp8:
713 ASSEMBLE_COMPARE(cmpb);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000714 break;
715 case kX87Test:
Ben Murdochda12d292016-06-02 14:46:10 +0100716 ASSEMBLE_COMPARE(test);
717 break;
718 case kX87Test16:
719 ASSEMBLE_COMPARE(test_w);
720 break;
721 case kX87Test8:
722 ASSEMBLE_COMPARE(test_b);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000723 break;
724 case kX87Imul:
725 if (HasImmediateInput(instr, 1)) {
726 __ imul(i.OutputRegister(), i.InputOperand(0), i.InputInt32(1));
727 } else {
728 __ imul(i.OutputRegister(), i.InputOperand(1));
729 }
730 break;
731 case kX87ImulHigh:
732 __ imul(i.InputRegister(1));
733 break;
734 case kX87UmulHigh:
735 __ mul(i.InputRegister(1));
736 break;
737 case kX87Idiv:
738 __ cdq();
739 __ idiv(i.InputOperand(1));
740 break;
741 case kX87Udiv:
742 __ Move(edx, Immediate(0));
743 __ div(i.InputOperand(1));
744 break;
745 case kX87Not:
746 __ not_(i.OutputOperand());
747 break;
748 case kX87Neg:
749 __ neg(i.OutputOperand());
750 break;
751 case kX87Or:
752 if (HasImmediateInput(instr, 1)) {
753 __ or_(i.InputOperand(0), i.InputImmediate(1));
754 } else {
755 __ or_(i.InputRegister(0), i.InputOperand(1));
756 }
757 break;
758 case kX87Xor:
759 if (HasImmediateInput(instr, 1)) {
760 __ xor_(i.InputOperand(0), i.InputImmediate(1));
761 } else {
762 __ xor_(i.InputRegister(0), i.InputOperand(1));
763 }
764 break;
765 case kX87Sub:
766 if (HasImmediateInput(instr, 1)) {
767 __ sub(i.InputOperand(0), i.InputImmediate(1));
768 } else {
769 __ sub(i.InputRegister(0), i.InputOperand(1));
770 }
771 break;
772 case kX87Shl:
773 if (HasImmediateInput(instr, 1)) {
774 __ shl(i.OutputOperand(), i.InputInt5(1));
775 } else {
776 __ shl_cl(i.OutputOperand());
777 }
778 break;
779 case kX87Shr:
780 if (HasImmediateInput(instr, 1)) {
781 __ shr(i.OutputOperand(), i.InputInt5(1));
782 } else {
783 __ shr_cl(i.OutputOperand());
784 }
785 break;
786 case kX87Sar:
787 if (HasImmediateInput(instr, 1)) {
788 __ sar(i.OutputOperand(), i.InputInt5(1));
789 } else {
790 __ sar_cl(i.OutputOperand());
791 }
792 break;
Ben Murdochda12d292016-06-02 14:46:10 +0100793 case kX87AddPair: {
794 // i.OutputRegister(0) == i.InputRegister(0) ... left low word.
795 // i.InputRegister(1) ... left high word.
796 // i.InputRegister(2) ... right low word.
797 // i.InputRegister(3) ... right high word.
798 bool use_temp = false;
799 if (i.OutputRegister(0).code() == i.InputRegister(1).code() ||
800 i.OutputRegister(0).code() == i.InputRegister(3).code()) {
801 // We cannot write to the output register directly, because it would
802 // overwrite an input for adc. We have to use the temp register.
803 use_temp = true;
804 __ Move(i.TempRegister(0), i.InputRegister(0));
805 __ add(i.TempRegister(0), i.InputRegister(2));
806 } else {
807 __ add(i.OutputRegister(0), i.InputRegister(2));
808 }
809 __ adc(i.InputRegister(1), Operand(i.InputRegister(3)));
810 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) {
811 __ Move(i.OutputRegister(1), i.InputRegister(1));
812 }
813 if (use_temp) {
814 __ Move(i.OutputRegister(0), i.TempRegister(0));
815 }
816 break;
817 }
818 case kX87SubPair: {
819 // i.OutputRegister(0) == i.InputRegister(0) ... left low word.
820 // i.InputRegister(1) ... left high word.
821 // i.InputRegister(2) ... right low word.
822 // i.InputRegister(3) ... right high word.
823 bool use_temp = false;
824 if (i.OutputRegister(0).code() == i.InputRegister(1).code() ||
825 i.OutputRegister(0).code() == i.InputRegister(3).code()) {
826 // We cannot write to the output register directly, because it would
827 // overwrite an input for adc. We have to use the temp register.
828 use_temp = true;
829 __ Move(i.TempRegister(0), i.InputRegister(0));
830 __ sub(i.TempRegister(0), i.InputRegister(2));
831 } else {
832 __ sub(i.OutputRegister(0), i.InputRegister(2));
833 }
834 __ sbb(i.InputRegister(1), Operand(i.InputRegister(3)));
835 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) {
836 __ Move(i.OutputRegister(1), i.InputRegister(1));
837 }
838 if (use_temp) {
839 __ Move(i.OutputRegister(0), i.TempRegister(0));
840 }
841 break;
842 }
843 case kX87MulPair: {
844 __ imul(i.OutputRegister(1), i.InputOperand(0));
845 __ mov(i.TempRegister(0), i.InputOperand(1));
846 __ imul(i.TempRegister(0), i.InputOperand(2));
847 __ add(i.OutputRegister(1), i.TempRegister(0));
848 __ mov(i.OutputRegister(0), i.InputOperand(0));
849 // Multiplies the low words and stores them in eax and edx.
850 __ mul(i.InputRegister(2));
851 __ add(i.OutputRegister(1), i.TempRegister(0));
852
853 break;
854 }
855 case kX87ShlPair:
856 if (HasImmediateInput(instr, 2)) {
857 __ ShlPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
858 } else {
859 // Shift has been loaded into CL by the register allocator.
860 __ ShlPair_cl(i.InputRegister(1), i.InputRegister(0));
861 }
862 break;
863 case kX87ShrPair:
864 if (HasImmediateInput(instr, 2)) {
865 __ ShrPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
866 } else {
867 // Shift has been loaded into CL by the register allocator.
868 __ ShrPair_cl(i.InputRegister(1), i.InputRegister(0));
869 }
870 break;
871 case kX87SarPair:
872 if (HasImmediateInput(instr, 2)) {
873 __ SarPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2));
874 } else {
875 // Shift has been loaded into CL by the register allocator.
876 __ SarPair_cl(i.InputRegister(1), i.InputRegister(0));
877 }
878 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000879 case kX87Ror:
880 if (HasImmediateInput(instr, 1)) {
881 __ ror(i.OutputOperand(), i.InputInt5(1));
882 } else {
883 __ ror_cl(i.OutputOperand());
884 }
885 break;
886 case kX87Lzcnt:
887 __ Lzcnt(i.OutputRegister(), i.InputOperand(0));
888 break;
889 case kX87Popcnt:
890 __ Popcnt(i.OutputRegister(), i.InputOperand(0));
891 break;
892 case kX87LoadFloat64Constant: {
893 InstructionOperand* source = instr->InputAt(0);
894 InstructionOperand* destination = instr->Output();
895 DCHECK(source->IsConstant());
896 X87OperandConverter g(this, nullptr);
897 Constant src_constant = g.ToConstant(source);
898
899 DCHECK_EQ(Constant::kFloat64, src_constant.type());
900 uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
901 uint32_t lower = static_cast<uint32_t>(src);
902 uint32_t upper = static_cast<uint32_t>(src >> 32);
903 if (destination->IsDoubleRegister()) {
904 __ sub(esp, Immediate(kDoubleSize));
905 __ mov(MemOperand(esp, 0), Immediate(lower));
906 __ mov(MemOperand(esp, kInt32Size), Immediate(upper));
907 __ fstp(0);
908 __ fld_d(MemOperand(esp, 0));
909 __ add(esp, Immediate(kDoubleSize));
910 } else {
911 UNREACHABLE();
912 }
913 break;
914 }
915 case kX87Float32Cmp: {
916 __ fld_s(MemOperand(esp, kFloatSize));
917 __ fld_s(MemOperand(esp, 0));
918 __ FCmp();
919 __ lea(esp, Operand(esp, 2 * kFloatSize));
920 break;
921 }
922 case kX87Float32Add: {
923 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
924 __ VerifyX87StackDepth(1);
925 }
926 __ X87SetFPUCW(0x027F);
927 __ fstp(0);
928 __ fld_s(MemOperand(esp, 0));
929 __ fld_s(MemOperand(esp, kFloatSize));
930 __ faddp();
931 // Clear stack.
932 __ lea(esp, Operand(esp, 2 * kFloatSize));
933 // Restore the default value of control word.
934 __ X87SetFPUCW(0x037F);
935 break;
936 }
937 case kX87Float32Sub: {
938 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
939 __ VerifyX87StackDepth(1);
940 }
941 __ X87SetFPUCW(0x027F);
942 __ fstp(0);
943 __ fld_s(MemOperand(esp, kFloatSize));
944 __ fld_s(MemOperand(esp, 0));
945 __ fsubp();
946 // Clear stack.
947 __ lea(esp, Operand(esp, 2 * kFloatSize));
948 // Restore the default value of control word.
949 __ X87SetFPUCW(0x037F);
950 break;
951 }
952 case kX87Float32Mul: {
953 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
954 __ VerifyX87StackDepth(1);
955 }
956 __ X87SetFPUCW(0x027F);
957 __ fstp(0);
958 __ fld_s(MemOperand(esp, kFloatSize));
959 __ fld_s(MemOperand(esp, 0));
960 __ fmulp();
961 // Clear stack.
962 __ lea(esp, Operand(esp, 2 * kFloatSize));
963 // Restore the default value of control word.
964 __ X87SetFPUCW(0x037F);
965 break;
966 }
967 case kX87Float32Div: {
968 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
969 __ VerifyX87StackDepth(1);
970 }
971 __ X87SetFPUCW(0x027F);
972 __ fstp(0);
973 __ fld_s(MemOperand(esp, kFloatSize));
974 __ fld_s(MemOperand(esp, 0));
975 __ fdivp();
976 // Clear stack.
977 __ lea(esp, Operand(esp, 2 * kFloatSize));
978 // Restore the default value of control word.
979 __ X87SetFPUCW(0x037F);
980 break;
981 }
982 case kX87Float32Max: {
983 Label check_nan_left, check_zero, return_left, return_right;
984 Condition condition = below;
985 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
986 __ VerifyX87StackDepth(1);
987 }
988 __ fstp(0);
989 __ fld_s(MemOperand(esp, kFloatSize));
990 __ fld_s(MemOperand(esp, 0));
991 __ fld(1);
992 __ fld(1);
993 __ FCmp();
994
995 // At least one NaN.
996 // Return the second operands if one of the two operands is NaN
997 __ j(parity_even, &return_right, Label::kNear);
998 __ j(equal, &check_zero, Label::kNear); // left == right.
999 __ j(condition, &return_left, Label::kNear);
1000 __ jmp(&return_right, Label::kNear);
1001
1002 __ bind(&check_zero);
1003 __ fld(0);
1004 __ fldz();
1005 __ FCmp();
1006 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1007
1008 __ fadd(1);
1009 __ jmp(&return_left, Label::kNear);
1010
1011 __ bind(&return_right);
1012 __ fxch();
1013
1014 __ bind(&return_left);
1015 __ fstp(0);
1016 __ lea(esp, Operand(esp, 2 * kFloatSize));
1017 break;
1018 }
1019 case kX87Float32Min: {
1020 Label check_nan_left, check_zero, return_left, return_right;
1021 Condition condition = above;
1022 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1023 __ VerifyX87StackDepth(1);
1024 }
1025 __ fstp(0);
1026 __ fld_s(MemOperand(esp, kFloatSize));
1027 __ fld_s(MemOperand(esp, 0));
1028 __ fld(1);
1029 __ fld(1);
1030 __ FCmp();
1031 // At least one NaN.
1032 // Return the second operands if one of the two operands is NaN
1033 __ j(parity_even, &return_right, Label::kNear);
1034 __ j(equal, &check_zero, Label::kNear); // left == right.
1035 __ j(condition, &return_left, Label::kNear);
1036 __ jmp(&return_right, Label::kNear);
1037
1038 __ bind(&check_zero);
1039 __ fld(0);
1040 __ fldz();
1041 __ FCmp();
1042 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1043 // At this point, both left and right are either 0 or -0.
1044 // Push st0 and st1 to stack, then pop them to temp registers and OR them,
1045 // load it to left.
1046 __ push(eax);
1047 __ fld(1);
1048 __ fld(1);
1049 __ sub(esp, Immediate(2 * kPointerSize));
1050 __ fstp_s(MemOperand(esp, 0));
1051 __ fstp_s(MemOperand(esp, kPointerSize));
1052 __ pop(eax);
1053 __ xor_(MemOperand(esp, 0), eax);
1054 __ fstp(0);
1055 __ fld_s(MemOperand(esp, 0));
1056 __ pop(eax); // restore esp
1057 __ pop(eax); // restore esp
1058 __ jmp(&return_left, Label::kNear);
1059
1060
1061 __ bind(&return_right);
1062 __ fxch();
1063
1064 __ bind(&return_left);
1065 __ fstp(0);
1066 __ lea(esp, Operand(esp, 2 * kFloatSize));
1067 break;
1068 }
1069 case kX87Float32Sqrt: {
1070 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1071 __ VerifyX87StackDepth(1);
1072 }
1073 __ fstp(0);
1074 __ fld_s(MemOperand(esp, 0));
1075 __ fsqrt();
1076 __ lea(esp, Operand(esp, kFloatSize));
1077 break;
1078 }
1079 case kX87Float32Abs: {
1080 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1081 __ VerifyX87StackDepth(1);
1082 }
1083 __ fstp(0);
1084 __ fld_s(MemOperand(esp, 0));
1085 __ fabs();
1086 __ lea(esp, Operand(esp, kFloatSize));
1087 break;
1088 }
1089 case kX87Float32Round: {
1090 RoundingMode mode =
1091 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1092 // Set the correct round mode in x87 control register
1093 __ X87SetRC((mode << 10));
1094
1095 if (!instr->InputAt(0)->IsDoubleRegister()) {
1096 InstructionOperand* input = instr->InputAt(0);
1097 USE(input);
1098 DCHECK(input->IsDoubleStackSlot());
1099 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1100 __ VerifyX87StackDepth(1);
1101 }
1102 __ fstp(0);
1103 __ fld_s(i.InputOperand(0));
1104 }
1105 __ frndint();
1106 __ X87SetRC(0x0000);
1107 break;
1108 }
1109 case kX87Float64Add: {
1110 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1111 __ VerifyX87StackDepth(1);
1112 }
1113 __ X87SetFPUCW(0x027F);
1114 __ fstp(0);
1115 __ fld_d(MemOperand(esp, 0));
1116 __ fld_d(MemOperand(esp, kDoubleSize));
1117 __ faddp();
1118 // Clear stack.
1119 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1120 // Restore the default value of control word.
1121 __ X87SetFPUCW(0x037F);
1122 break;
1123 }
1124 case kX87Float64Sub: {
1125 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1126 __ VerifyX87StackDepth(1);
1127 }
1128 __ X87SetFPUCW(0x027F);
1129 __ fstp(0);
1130 __ fld_d(MemOperand(esp, kDoubleSize));
1131 __ fsub_d(MemOperand(esp, 0));
1132 // Clear stack.
1133 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1134 // Restore the default value of control word.
1135 __ X87SetFPUCW(0x037F);
1136 break;
1137 }
1138 case kX87Float64Mul: {
1139 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1140 __ VerifyX87StackDepth(1);
1141 }
1142 __ X87SetFPUCW(0x027F);
1143 __ fstp(0);
1144 __ fld_d(MemOperand(esp, kDoubleSize));
1145 __ fmul_d(MemOperand(esp, 0));
1146 // Clear stack.
1147 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1148 // Restore the default value of control word.
1149 __ X87SetFPUCW(0x037F);
1150 break;
1151 }
1152 case kX87Float64Div: {
1153 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1154 __ VerifyX87StackDepth(1);
1155 }
1156 __ X87SetFPUCW(0x027F);
1157 __ fstp(0);
1158 __ fld_d(MemOperand(esp, kDoubleSize));
1159 __ fdiv_d(MemOperand(esp, 0));
1160 // Clear stack.
1161 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1162 // Restore the default value of control word.
1163 __ X87SetFPUCW(0x037F);
1164 break;
1165 }
1166 case kX87Float64Mod: {
1167 FrameScope frame_scope(&masm_, StackFrame::MANUAL);
1168 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1169 __ VerifyX87StackDepth(1);
1170 }
1171 __ mov(eax, esp);
1172 __ PrepareCallCFunction(4, eax);
1173 __ fstp(0);
1174 __ fld_d(MemOperand(eax, 0));
1175 __ fstp_d(Operand(esp, 1 * kDoubleSize));
1176 __ fld_d(MemOperand(eax, kDoubleSize));
1177 __ fstp_d(Operand(esp, 0));
1178 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1179 4);
1180 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1181 break;
1182 }
1183 case kX87Float64Max: {
1184 Label check_zero, return_left, return_right;
1185 Condition condition = below;
1186 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1187 __ VerifyX87StackDepth(1);
1188 }
1189 __ fstp(0);
1190 __ fld_d(MemOperand(esp, kDoubleSize));
1191 __ fld_d(MemOperand(esp, 0));
1192 __ fld(1);
1193 __ fld(1);
1194 __ FCmp();
1195 __ j(parity_even, &return_right,
1196 Label::kNear); // At least one NaN, Return right.
1197 __ j(equal, &check_zero, Label::kNear); // left == right.
1198 __ j(condition, &return_left, Label::kNear);
1199 __ jmp(&return_right, Label::kNear);
1200
1201 __ bind(&check_zero);
1202 __ fld(0);
1203 __ fldz();
1204 __ FCmp();
1205 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1206
1207 __ bind(&return_right);
1208 __ fxch();
1209
1210 __ bind(&return_left);
1211 __ fstp(0);
1212 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1213 break;
1214 }
1215 case kX87Float64Min: {
1216 Label check_zero, return_left, return_right;
1217 Condition condition = above;
1218 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1219 __ VerifyX87StackDepth(1);
1220 }
1221 __ fstp(0);
1222 __ fld_d(MemOperand(esp, kDoubleSize));
1223 __ fld_d(MemOperand(esp, 0));
1224 __ fld(1);
1225 __ fld(1);
1226 __ FCmp();
1227 __ j(parity_even, &return_right,
1228 Label::kNear); // At least one NaN, return right value.
1229 __ j(equal, &check_zero, Label::kNear); // left == right.
1230 __ j(condition, &return_left, Label::kNear);
1231 __ jmp(&return_right, Label::kNear);
1232
1233 __ bind(&check_zero);
1234 __ fld(0);
1235 __ fldz();
1236 __ FCmp();
1237 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1238
1239 __ bind(&return_right);
1240 __ fxch();
1241
1242 __ bind(&return_left);
1243 __ fstp(0);
1244 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1245 break;
1246 }
1247 case kX87Float64Abs: {
1248 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1249 __ VerifyX87StackDepth(1);
1250 }
1251 __ fstp(0);
1252 __ fld_d(MemOperand(esp, 0));
1253 __ fabs();
1254 __ lea(esp, Operand(esp, kDoubleSize));
1255 break;
1256 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001257 case kX87Int32ToFloat32: {
1258 InstructionOperand* input = instr->InputAt(0);
1259 DCHECK(input->IsRegister() || input->IsStackSlot());
1260 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1261 __ VerifyX87StackDepth(1);
1262 }
1263 __ fstp(0);
1264 if (input->IsRegister()) {
1265 Register input_reg = i.InputRegister(0);
1266 __ push(input_reg);
1267 __ fild_s(Operand(esp, 0));
1268 __ pop(input_reg);
1269 } else {
1270 __ fild_s(i.InputOperand(0));
1271 }
1272 break;
1273 }
1274 case kX87Uint32ToFloat32: {
1275 InstructionOperand* input = instr->InputAt(0);
1276 DCHECK(input->IsRegister() || input->IsStackSlot());
1277 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1278 __ VerifyX87StackDepth(1);
1279 }
1280 __ fstp(0);
1281 Label msb_set_src;
1282 Label jmp_return;
1283 // Put input integer into eax(tmporarilly)
1284 __ push(eax);
1285 if (input->IsRegister())
1286 __ mov(eax, i.InputRegister(0));
1287 else
1288 __ mov(eax, i.InputOperand(0));
1289
1290 __ test(eax, eax);
1291 __ j(sign, &msb_set_src, Label::kNear);
1292 __ push(eax);
1293 __ fild_s(Operand(esp, 0));
1294 __ pop(eax);
1295
1296 __ jmp(&jmp_return, Label::kNear);
1297 __ bind(&msb_set_src);
1298 // Need another temp reg
1299 __ push(ebx);
1300 __ mov(ebx, eax);
1301 __ shr(eax, 1);
1302 // Recover the least significant bit to avoid rounding errors.
1303 __ and_(ebx, Immediate(1));
1304 __ or_(eax, ebx);
1305 __ push(eax);
1306 __ fild_s(Operand(esp, 0));
1307 __ pop(eax);
1308 __ fld(0);
1309 __ faddp();
1310 // Restore the ebx
1311 __ pop(ebx);
1312 __ bind(&jmp_return);
1313 // Restore the eax
1314 __ pop(eax);
1315 break;
1316 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001317 case kX87Int32ToFloat64: {
1318 InstructionOperand* input = instr->InputAt(0);
1319 DCHECK(input->IsRegister() || input->IsStackSlot());
1320 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1321 __ VerifyX87StackDepth(1);
1322 }
1323 __ fstp(0);
1324 if (input->IsRegister()) {
1325 Register input_reg = i.InputRegister(0);
1326 __ push(input_reg);
1327 __ fild_s(Operand(esp, 0));
1328 __ pop(input_reg);
1329 } else {
1330 __ fild_s(i.InputOperand(0));
1331 }
1332 break;
1333 }
1334 case kX87Float32ToFloat64: {
1335 InstructionOperand* input = instr->InputAt(0);
1336 if (input->IsDoubleRegister()) {
1337 __ sub(esp, Immediate(kDoubleSize));
Ben Murdochda12d292016-06-02 14:46:10 +01001338 __ fstp_s(MemOperand(esp, 0));
1339 __ fld_s(MemOperand(esp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001340 __ add(esp, Immediate(kDoubleSize));
1341 } else {
1342 DCHECK(input->IsDoubleStackSlot());
1343 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1344 __ VerifyX87StackDepth(1);
1345 }
1346 __ fstp(0);
1347 __ fld_s(i.InputOperand(0));
1348 }
1349 break;
1350 }
1351 case kX87Uint32ToFloat64: {
1352 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1353 __ VerifyX87StackDepth(1);
1354 }
1355 __ fstp(0);
1356 __ LoadUint32NoSSE2(i.InputRegister(0));
1357 break;
1358 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001359 case kX87Float32ToInt32: {
1360 if (!instr->InputAt(0)->IsDoubleRegister()) {
1361 __ fld_s(i.InputOperand(0));
1362 }
1363 __ TruncateX87TOSToI(i.OutputRegister(0));
1364 if (!instr->InputAt(0)->IsDoubleRegister()) {
1365 __ fstp(0);
1366 }
1367 break;
1368 }
1369 case kX87Float32ToUint32: {
1370 if (!instr->InputAt(0)->IsDoubleRegister()) {
1371 __ fld_s(i.InputOperand(0));
1372 }
1373 Label success;
1374 __ TruncateX87TOSToI(i.OutputRegister(0));
1375 __ test(i.OutputRegister(0), i.OutputRegister(0));
1376 __ j(positive, &success);
1377 __ push(Immediate(INT32_MIN));
1378 __ fild_s(Operand(esp, 0));
1379 __ lea(esp, Operand(esp, kPointerSize));
1380 __ faddp();
1381 __ TruncateX87TOSToI(i.OutputRegister(0));
1382 __ or_(i.OutputRegister(0), Immediate(0x80000000));
1383 __ bind(&success);
1384 if (!instr->InputAt(0)->IsDoubleRegister()) {
1385 __ fstp(0);
1386 }
1387 break;
1388 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001389 case kX87Float64ToInt32: {
1390 if (!instr->InputAt(0)->IsDoubleRegister()) {
1391 __ fld_d(i.InputOperand(0));
1392 }
1393 __ TruncateX87TOSToI(i.OutputRegister(0));
1394 if (!instr->InputAt(0)->IsDoubleRegister()) {
1395 __ fstp(0);
1396 }
1397 break;
1398 }
1399 case kX87Float64ToFloat32: {
1400 InstructionOperand* input = instr->InputAt(0);
1401 if (input->IsDoubleRegister()) {
1402 __ sub(esp, Immediate(kDoubleSize));
1403 __ fstp_s(MemOperand(esp, 0));
1404 __ fld_s(MemOperand(esp, 0));
1405 __ add(esp, Immediate(kDoubleSize));
1406 } else {
1407 DCHECK(input->IsDoubleStackSlot());
1408 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1409 __ VerifyX87StackDepth(1);
1410 }
1411 __ fstp(0);
1412 __ fld_d(i.InputOperand(0));
1413 __ sub(esp, Immediate(kDoubleSize));
1414 __ fstp_s(MemOperand(esp, 0));
1415 __ fld_s(MemOperand(esp, 0));
1416 __ add(esp, Immediate(kDoubleSize));
1417 }
1418 break;
1419 }
1420 case kX87Float64ToUint32: {
1421 __ push_imm32(-2147483648);
1422 if (!instr->InputAt(0)->IsDoubleRegister()) {
1423 __ fld_d(i.InputOperand(0));
1424 }
1425 __ fild_s(Operand(esp, 0));
Ben Murdochda12d292016-06-02 14:46:10 +01001426 __ fld(1);
1427 __ faddp();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001428 __ TruncateX87TOSToI(i.OutputRegister(0));
1429 __ add(esp, Immediate(kInt32Size));
1430 __ add(i.OutputRegister(), Immediate(0x80000000));
Ben Murdochda12d292016-06-02 14:46:10 +01001431 __ fstp(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001432 if (!instr->InputAt(0)->IsDoubleRegister()) {
1433 __ fstp(0);
1434 }
1435 break;
1436 }
1437 case kX87Float64ExtractHighWord32: {
1438 if (instr->InputAt(0)->IsDoubleRegister()) {
1439 __ sub(esp, Immediate(kDoubleSize));
1440 __ fst_d(MemOperand(esp, 0));
1441 __ mov(i.OutputRegister(), MemOperand(esp, kDoubleSize / 2));
1442 __ add(esp, Immediate(kDoubleSize));
1443 } else {
1444 InstructionOperand* input = instr->InputAt(0);
1445 USE(input);
1446 DCHECK(input->IsDoubleStackSlot());
1447 __ mov(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2));
1448 }
1449 break;
1450 }
1451 case kX87Float64ExtractLowWord32: {
1452 if (instr->InputAt(0)->IsDoubleRegister()) {
1453 __ sub(esp, Immediate(kDoubleSize));
1454 __ fst_d(MemOperand(esp, 0));
1455 __ mov(i.OutputRegister(), MemOperand(esp, 0));
1456 __ add(esp, Immediate(kDoubleSize));
1457 } else {
1458 InstructionOperand* input = instr->InputAt(0);
1459 USE(input);
1460 DCHECK(input->IsDoubleStackSlot());
1461 __ mov(i.OutputRegister(), i.InputOperand(0));
1462 }
1463 break;
1464 }
1465 case kX87Float64InsertHighWord32: {
1466 __ sub(esp, Immediate(kDoubleSize));
1467 __ fstp_d(MemOperand(esp, 0));
1468 __ mov(MemOperand(esp, kDoubleSize / 2), i.InputRegister(1));
1469 __ fld_d(MemOperand(esp, 0));
1470 __ add(esp, Immediate(kDoubleSize));
1471 break;
1472 }
1473 case kX87Float64InsertLowWord32: {
1474 __ sub(esp, Immediate(kDoubleSize));
1475 __ fstp_d(MemOperand(esp, 0));
1476 __ mov(MemOperand(esp, 0), i.InputRegister(1));
1477 __ fld_d(MemOperand(esp, 0));
1478 __ add(esp, Immediate(kDoubleSize));
1479 break;
1480 }
1481 case kX87Float64Sqrt: {
1482 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1483 __ VerifyX87StackDepth(1);
1484 }
1485 __ X87SetFPUCW(0x027F);
1486 __ fstp(0);
1487 __ fld_d(MemOperand(esp, 0));
1488 __ fsqrt();
1489 __ lea(esp, Operand(esp, kDoubleSize));
1490 __ X87SetFPUCW(0x037F);
1491 break;
1492 }
1493 case kX87Float64Round: {
1494 RoundingMode mode =
1495 static_cast<RoundingMode>(MiscField::decode(instr->opcode()));
1496 // Set the correct round mode in x87 control register
1497 __ X87SetRC((mode << 10));
1498
1499 if (!instr->InputAt(0)->IsDoubleRegister()) {
1500 InstructionOperand* input = instr->InputAt(0);
1501 USE(input);
1502 DCHECK(input->IsDoubleStackSlot());
1503 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1504 __ VerifyX87StackDepth(1);
1505 }
1506 __ fstp(0);
1507 __ fld_d(i.InputOperand(0));
1508 }
1509 __ frndint();
1510 __ X87SetRC(0x0000);
1511 break;
1512 }
1513 case kX87Float64Cmp: {
1514 __ fld_d(MemOperand(esp, kDoubleSize));
1515 __ fld_d(MemOperand(esp, 0));
1516 __ FCmp();
1517 __ lea(esp, Operand(esp, 2 * kDoubleSize));
1518 break;
1519 }
1520 case kX87Movsxbl:
1521 __ movsx_b(i.OutputRegister(), i.MemoryOperand());
1522 break;
1523 case kX87Movzxbl:
1524 __ movzx_b(i.OutputRegister(), i.MemoryOperand());
1525 break;
1526 case kX87Movb: {
1527 size_t index = 0;
1528 Operand operand = i.MemoryOperand(&index);
1529 if (HasImmediateInput(instr, index)) {
1530 __ mov_b(operand, i.InputInt8(index));
1531 } else {
1532 __ mov_b(operand, i.InputRegister(index));
1533 }
1534 break;
1535 }
1536 case kX87Movsxwl:
1537 __ movsx_w(i.OutputRegister(), i.MemoryOperand());
1538 break;
1539 case kX87Movzxwl:
1540 __ movzx_w(i.OutputRegister(), i.MemoryOperand());
1541 break;
1542 case kX87Movw: {
1543 size_t index = 0;
1544 Operand operand = i.MemoryOperand(&index);
1545 if (HasImmediateInput(instr, index)) {
1546 __ mov_w(operand, i.InputInt16(index));
1547 } else {
1548 __ mov_w(operand, i.InputRegister(index));
1549 }
1550 break;
1551 }
1552 case kX87Movl:
1553 if (instr->HasOutput()) {
1554 __ mov(i.OutputRegister(), i.MemoryOperand());
1555 } else {
1556 size_t index = 0;
1557 Operand operand = i.MemoryOperand(&index);
1558 if (HasImmediateInput(instr, index)) {
1559 __ mov(operand, i.InputImmediate(index));
1560 } else {
1561 __ mov(operand, i.InputRegister(index));
1562 }
1563 }
1564 break;
1565 case kX87Movsd: {
1566 if (instr->HasOutput()) {
1567 X87Register output = i.OutputDoubleRegister();
1568 USE(output);
1569 DCHECK(output.code() == 0);
1570 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1571 __ VerifyX87StackDepth(1);
1572 }
1573 __ fstp(0);
1574 __ fld_d(i.MemoryOperand());
1575 } else {
1576 size_t index = 0;
1577 Operand operand = i.MemoryOperand(&index);
1578 __ fst_d(operand);
1579 }
1580 break;
1581 }
1582 case kX87Movss: {
1583 if (instr->HasOutput()) {
1584 X87Register output = i.OutputDoubleRegister();
1585 USE(output);
1586 DCHECK(output.code() == 0);
1587 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1588 __ VerifyX87StackDepth(1);
1589 }
1590 __ fstp(0);
1591 __ fld_s(i.MemoryOperand());
1592 } else {
1593 size_t index = 0;
1594 Operand operand = i.MemoryOperand(&index);
1595 __ fst_s(operand);
1596 }
1597 break;
1598 }
1599 case kX87BitcastFI: {
1600 __ mov(i.OutputRegister(), MemOperand(esp, 0));
1601 __ lea(esp, Operand(esp, kFloatSize));
1602 break;
1603 }
1604 case kX87BitcastIF: {
1605 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
1606 __ VerifyX87StackDepth(1);
1607 }
1608 __ fstp(0);
1609 if (instr->InputAt(0)->IsRegister()) {
1610 __ lea(esp, Operand(esp, -kFloatSize));
1611 __ mov(MemOperand(esp, 0), i.InputRegister(0));
1612 __ fld_s(MemOperand(esp, 0));
1613 __ lea(esp, Operand(esp, kFloatSize));
1614 } else {
1615 __ fld_s(i.InputOperand(0));
1616 }
1617 break;
1618 }
1619 case kX87Lea: {
1620 AddressingMode mode = AddressingModeField::decode(instr->opcode());
1621 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation
1622 // and addressing mode just happens to work out. The "addl"/"subl" forms
1623 // in these cases are faster based on measurements.
1624 if (mode == kMode_MI) {
1625 __ Move(i.OutputRegister(), Immediate(i.InputInt32(0)));
1626 } else if (i.InputRegister(0).is(i.OutputRegister())) {
1627 if (mode == kMode_MRI) {
1628 int32_t constant_summand = i.InputInt32(1);
1629 if (constant_summand > 0) {
1630 __ add(i.OutputRegister(), Immediate(constant_summand));
1631 } else if (constant_summand < 0) {
1632 __ sub(i.OutputRegister(), Immediate(-constant_summand));
1633 }
1634 } else if (mode == kMode_MR1) {
1635 if (i.InputRegister(1).is(i.OutputRegister())) {
1636 __ shl(i.OutputRegister(), 1);
1637 } else {
1638 __ lea(i.OutputRegister(), i.MemoryOperand());
1639 }
1640 } else if (mode == kMode_M2) {
1641 __ shl(i.OutputRegister(), 1);
1642 } else if (mode == kMode_M4) {
1643 __ shl(i.OutputRegister(), 2);
1644 } else if (mode == kMode_M8) {
1645 __ shl(i.OutputRegister(), 3);
1646 } else {
1647 __ lea(i.OutputRegister(), i.MemoryOperand());
1648 }
1649 } else {
1650 __ lea(i.OutputRegister(), i.MemoryOperand());
1651 }
1652 break;
1653 }
1654 case kX87Push:
1655 if (instr->InputAt(0)->IsDoubleRegister()) {
1656 auto allocated = AllocatedOperand::cast(*instr->InputAt(0));
1657 if (allocated.representation() == MachineRepresentation::kFloat32) {
1658 __ sub(esp, Immediate(kDoubleSize));
1659 __ fst_s(Operand(esp, 0));
1660 } else {
1661 DCHECK(allocated.representation() == MachineRepresentation::kFloat64);
1662 __ sub(esp, Immediate(kDoubleSize));
1663 __ fst_d(Operand(esp, 0));
1664 }
1665 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1666 } else if (instr->InputAt(0)->IsDoubleStackSlot()) {
1667 auto allocated = AllocatedOperand::cast(*instr->InputAt(0));
1668 if (allocated.representation() == MachineRepresentation::kFloat32) {
1669 __ sub(esp, Immediate(kDoubleSize));
1670 __ fld_s(i.InputOperand(0));
1671 __ fstp_s(MemOperand(esp, 0));
1672 } else {
1673 DCHECK(allocated.representation() == MachineRepresentation::kFloat64);
1674 __ sub(esp, Immediate(kDoubleSize));
1675 __ fld_d(i.InputOperand(0));
1676 __ fstp_d(MemOperand(esp, 0));
1677 }
1678 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1679 } else if (HasImmediateInput(instr, 0)) {
1680 __ push(i.InputImmediate(0));
1681 frame_access_state()->IncreaseSPDelta(1);
1682 } else {
1683 __ push(i.InputOperand(0));
1684 frame_access_state()->IncreaseSPDelta(1);
1685 }
1686 break;
1687 case kX87Poke: {
1688 int const slot = MiscField::decode(instr->opcode());
1689 if (HasImmediateInput(instr, 0)) {
1690 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0));
1691 } else {
1692 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0));
1693 }
1694 break;
1695 }
1696 case kX87PushFloat32:
1697 __ lea(esp, Operand(esp, -kFloatSize));
1698 if (instr->InputAt(0)->IsDoubleStackSlot()) {
1699 __ fld_s(i.InputOperand(0));
1700 __ fstp_s(MemOperand(esp, 0));
1701 } else if (instr->InputAt(0)->IsDoubleRegister()) {
1702 __ fst_s(MemOperand(esp, 0));
1703 } else {
1704 UNREACHABLE();
1705 }
1706 break;
1707 case kX87PushFloat64:
1708 __ lea(esp, Operand(esp, -kDoubleSize));
1709 if (instr->InputAt(0)->IsDoubleStackSlot()) {
1710 __ fld_d(i.InputOperand(0));
1711 __ fstp_d(MemOperand(esp, 0));
1712 } else if (instr->InputAt(0)->IsDoubleRegister()) {
1713 __ fst_d(MemOperand(esp, 0));
1714 } else {
1715 UNREACHABLE();
1716 }
1717 break;
1718 case kCheckedLoadInt8:
1719 ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_b);
1720 break;
1721 case kCheckedLoadUint8:
1722 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_b);
1723 break;
1724 case kCheckedLoadInt16:
1725 ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_w);
1726 break;
1727 case kCheckedLoadUint16:
1728 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_w);
1729 break;
1730 case kCheckedLoadWord32:
1731 ASSEMBLE_CHECKED_LOAD_INTEGER(mov);
1732 break;
1733 case kCheckedLoadFloat32:
1734 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_s);
1735 break;
1736 case kCheckedLoadFloat64:
1737 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_d);
1738 break;
1739 case kCheckedStoreWord8:
1740 ASSEMBLE_CHECKED_STORE_INTEGER(mov_b);
1741 break;
1742 case kCheckedStoreWord16:
1743 ASSEMBLE_CHECKED_STORE_INTEGER(mov_w);
1744 break;
1745 case kCheckedStoreWord32:
1746 ASSEMBLE_CHECKED_STORE_INTEGER(mov);
1747 break;
1748 case kCheckedStoreFloat32:
1749 ASSEMBLE_CHECKED_STORE_FLOAT(fst_s);
1750 break;
1751 case kCheckedStoreFloat64:
1752 ASSEMBLE_CHECKED_STORE_FLOAT(fst_d);
1753 break;
1754 case kX87StackCheck: {
1755 ExternalReference const stack_limit =
1756 ExternalReference::address_of_stack_limit(isolate());
1757 __ cmp(esp, Operand::StaticVariable(stack_limit));
1758 break;
1759 }
1760 case kCheckedLoadWord64:
1761 case kCheckedStoreWord64:
1762 UNREACHABLE(); // currently unsupported checked int64 load/store.
1763 break;
1764 }
1765} // NOLINT(readability/fn_size)
1766
1767
1768// Assembles a branch after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  X87OperandConverter i(this, instr);
  Label::Distance flabel_distance =
      branch->fallthru ? Label::kNear : Label::kFar;

  // Rather than jumping straight to the true/false targets, route both paths
  // through local trampoline labels. The taken-path trampoline (tlabel_tmp)
  // is the single place where extra code can be inserted before reaching the
  // real target -- used below to record the x87 stack layout when this branch
  // leads to a deoptimization exit.
  Label done;
  Label tlabel_tmp;
  Label flabel_tmp;
  Label* tlabel = &tlabel_tmp;
  Label* flabel = &flabel_tmp;

  // The final destinations the trampolines forward to.
  Label* tlabel_dst = branch->true_label;
  Label* flabel_dst = branch->false_label;

  // Emit the conditional jump(s). The unordered variants first test the
  // parity flag, which the preceding floating-point compare sets when at
  // least one operand was NaN.
  switch (branch->condition) {
    case kUnorderedEqual:
      // NaN operand: equality cannot hold, go to the false path.
      __ j(parity_even, flabel, flabel_distance);
    // Fall through.
    case kEqual:
      __ j(equal, tlabel);
      break;
    case kUnorderedNotEqual:
      // NaN operand: "not equal" holds, go to the true path.
      __ j(parity_even, tlabel);
    // Fall through.
    case kNotEqual:
      __ j(not_equal, tlabel);
      break;
    case kSignedLessThan:
      __ j(less, tlabel);
      break;
    case kSignedGreaterThanOrEqual:
      __ j(greater_equal, tlabel);
      break;
    case kSignedLessThanOrEqual:
      __ j(less_equal, tlabel);
      break;
    case kSignedGreaterThan:
      __ j(greater, tlabel);
      break;
    case kUnsignedLessThan:
      __ j(below, tlabel);
      break;
    case kUnsignedGreaterThanOrEqual:
      __ j(above_equal, tlabel);
      break;
    case kUnsignedLessThanOrEqual:
      __ j(below_equal, tlabel);
      break;
    case kUnsignedGreaterThan:
      __ j(above, tlabel);
      break;
    case kOverflow:
      __ j(overflow, tlabel);
      break;
    case kNotOverflow:
      __ j(no_overflow, tlabel);
      break;
    default:
      UNREACHABLE();
      break;
  }
  // Add a jump if not falling through to the next block.
  if (!branch->fallthru) __ jmp(flabel);

  // Fall-through case (condition false, false block is next in assembly
  // order): skip over the trampolines. When !fallthru, the unconditional
  // jmp(flabel) above makes this jmp unreachable.
  __ jmp(&done);
  __ bind(&tlabel_tmp);
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  if (mode == kFlags_deoptimize) {
    // Count double-register inputs to decide whether the x87 stack holds a
    // live value at this deopt point.
    int double_register_param_count = 0;
    int x87_layout = 0;
    for (size_t i = 0; i < instr->InputCount(); i++) {
      if (instr->InputAt(i)->IsDoubleRegister()) {
        double_register_param_count++;
      }
    }
    // Currently we use only one X87 register. If double_register_param_count
    // is bigger than 1, it means duplicated double register is added to input
    // of this instruction.
    if (double_register_param_count > 0) {
      // NOTE(review): (0 << 3) | 1 presumably encodes "depth 1, st0 live"
      // for the deoptimizer -- confirm against Deoptimizer's decoding.
      x87_layout = (0 << 3) | 1;
    }
    // The layout of x87 register stack is loaded on the top of FPU register
    // stack for deoptimization.
    __ push(Immediate(x87_layout));
    __ fild_s(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, kPointerSize));
  }
  __ jmp(tlabel_dst);
  __ bind(&flabel_tmp);
  __ jmp(flabel_dst);
  __ bind(&done);
}
1861
1862
1863void CodeGenerator::AssembleArchJump(RpoNumber target) {
1864 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
1865}
1866
1867
1868// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  X87OperandConverter i(this, instr);
  Label done;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label check;
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = no_condition;
  // Map the flags condition to an x86 condition code. The unordered variants
  // first test the parity flag (set by the preceding floating-point compare
  // when an operand was NaN): in that case the result is already known, so it
  // is materialized eagerly and control jumps straight to `done`; otherwise
  // they fall through to the ordered handling below.
  switch (condition) {
    case kUnorderedEqual:
      // NaN operand: equality is false.
      __ j(parity_odd, &check, Label::kNear);
      __ Move(reg, Immediate(0));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kEqual:
      cc = equal;
      break;
    case kUnorderedNotEqual:
      // NaN operand: inequality is true.
      __ j(parity_odd, &check, Label::kNear);
      __ mov(reg, Immediate(1));
      __ jmp(&done, Label::kNear);
    // Fall through.
    case kNotEqual:
      cc = not_equal;
      break;
    case kSignedLessThan:
      cc = less;
      break;
    case kSignedGreaterThanOrEqual:
      cc = greater_equal;
      break;
    case kSignedLessThanOrEqual:
      cc = less_equal;
      break;
    case kSignedGreaterThan:
      cc = greater;
      break;
    case kUnsignedLessThan:
      cc = below;
      break;
    case kUnsignedGreaterThanOrEqual:
      cc = above_equal;
      break;
    case kUnsignedLessThanOrEqual:
      cc = below_equal;
      break;
    case kUnsignedGreaterThan:
      cc = above;
      break;
    case kOverflow:
      cc = overflow;
      break;
    case kNotOverflow:
      cc = no_overflow;
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ bind(&check);
  if (reg.is_byte_register()) {
    // setcc for byte registers (al, bl, cl, dl).
    __ setcc(cc, reg);
    __ movzx_b(reg, reg);
  } else {
    // Emit a branch to set a register to either 1 or 0.
    Label set;
    __ j(cc, &set, Label::kNear);
    __ Move(reg, Immediate(0));
    __ jmp(&done, Label::kNear);
    __ bind(&set);
    __ mov(reg, Immediate(1));
  }
  __ bind(&done);
}
1947
1948
1949void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1950 X87OperandConverter i(this, instr);
1951 Register input = i.InputRegister(0);
1952 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1953 __ cmp(input, Immediate(i.InputInt32(index + 0)));
1954 __ j(equal, GetLabel(i.InputRpo(index + 1)));
1955 }
1956 AssembleArchJump(i.InputRpo(1));
1957}
1958
1959
1960void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1961 X87OperandConverter i(this, instr);
1962 Register input = i.InputRegister(0);
1963 size_t const case_count = instr->InputCount() - 2;
1964 Label** cases = zone()->NewArray<Label*>(case_count);
1965 for (size_t index = 0; index < case_count; ++index) {
1966 cases[index] = GetLabel(i.InputRpo(index + 2));
1967 }
1968 Label* const table = AddJumpTable(cases, case_count);
1969 __ cmp(input, Immediate(case_count));
1970 __ j(above_equal, GetLabel(i.InputRpo(1)));
1971 __ jmp(Operand::JumpTable(input, times_4, table));
1972}
1973
1974
1975void CodeGenerator::AssembleDeoptimizerCall(
1976 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
1977 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
1978 isolate(), deoptimization_id, bailout_type);
1979 __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1980}
1981
1982
1983// The calling convention for JSFunctions on X87 passes arguments on the
1984// stack and the JSFunction and context in EDI and ESI, respectively, thus
1985// the steps of the call look as follows:
1986
1987// --{ before the call instruction }--------------------------------------------
1988// | caller frame |
1989// ^ esp ^ ebp
1990
1991// --{ push arguments and setup ESI, EDI }--------------------------------------
1992// | args + receiver | caller frame |
1993// ^ esp ^ ebp
1994// [edi = JSFunction, esi = context]
1995
1996// --{ call [edi + kCodeEntryOffset] }------------------------------------------
1997// | RET | args + receiver | caller frame |
1998// ^ esp ^ ebp
1999
2000// =={ prologue of called function }============================================
2001// --{ push ebp }---------------------------------------------------------------
2002// | FP | RET | args + receiver | caller frame |
2003// ^ esp ^ ebp
2004
2005// --{ mov ebp, esp }-----------------------------------------------------------
2006// | FP | RET | args + receiver | caller frame |
2007// ^ ebp,esp
2008
2009// --{ push esi }---------------------------------------------------------------
2010// | CTX | FP | RET | args + receiver | caller frame |
2011// ^esp ^ ebp
2012
2013// --{ push edi }---------------------------------------------------------------
2014// | FNC | CTX | FP | RET | args + receiver | caller frame |
2015// ^esp ^ ebp
2016
2017// --{ subi esp, #N }-----------------------------------------------------------
2018// | callee frame | FNC | CTX | FP | RET | args + receiver | caller frame |
2019// ^esp ^ ebp
2020
2021// =={ body of called function }================================================
2022
2023// =={ epilogue of called function }============================================
2024// --{ mov esp, ebp }-----------------------------------------------------------
2025// | FP | RET | args + receiver | caller frame |
2026// ^ esp,ebp
2027
2028// --{ pop ebp }-----------------------------------------------------------
2029// | | RET | args + receiver | caller frame |
2030// ^ esp ^ ebp
2031
2032// --{ ret #A+1 }-----------------------------------------------------------
2033// | | caller frame |
2034// ^ esp ^ ebp
2035
2036
// Runtime function calls are accomplished by doing a stub call to the
// CEntryStub (a real code object). On X87, arguments are passed on the
// stack, the number of arguments in EAX, the address of the runtime function
// in EBX, and the context in ESI.
2041
2042// --{ before the call instruction }--------------------------------------------
2043// | caller frame |
2044// ^ esp ^ ebp
2045
2046// --{ push arguments and setup EAX, EBX, and ESI }-----------------------------
2047// | args + receiver | caller frame |
2048// ^ esp ^ ebp
2049// [eax = #args, ebx = runtime function, esi = context]
2050
2051// --{ call #CEntryStub }-------------------------------------------------------
2052// | RET | args + receiver | caller frame |
2053// ^ esp ^ ebp
2054
2055// =={ body of runtime function }===============================================
2056
2057// --{ runtime returns }--------------------------------------------------------
2058// | caller frame |
2059// ^ esp ^ ebp
2060
2061// Other custom linkages (e.g. for calling directly into and out of C++) may
2062// need to save callee-saved registers on the stack, which is done in the
2063// function prologue of generated code.
2064
2065// --{ before the call instruction }--------------------------------------------
2066// | caller frame |
2067// ^ esp ^ ebp
2068
// --{ set up arguments in registers and on the stack }-------------------------
2070// | args | caller frame |
2071// ^ esp ^ ebp
2072// [r0 = arg0, r1 = arg1, ...]
2073
2074// --{ call code }--------------------------------------------------------------
2075// | RET | args | caller frame |
2076// ^ esp ^ ebp
2077
2078// =={ prologue of called function }============================================
2079// --{ push ebp }---------------------------------------------------------------
2080// | FP | RET | args | caller frame |
2081// ^ esp ^ ebp
2082
2083// --{ mov ebp, esp }-----------------------------------------------------------
2084// | FP | RET | args | caller frame |
2085// ^ ebp,esp
2086
2087// --{ save registers }---------------------------------------------------------
2088// | regs | FP | RET | args | caller frame |
2089// ^ esp ^ ebp
2090
2091// --{ subi esp, #N }-----------------------------------------------------------
2092// | callee frame | regs | FP | RET | args | caller frame |
2093// ^esp ^ ebp
2094
2095// =={ body of called function }================================================
2096
2097// =={ epilogue of called function }============================================
2098// --{ restore registers }------------------------------------------------------
2099// | regs | FP | RET | args | caller frame |
2100// ^ esp ^ ebp
2101
2102// --{ mov esp, ebp }-----------------------------------------------------------
2103// | FP | RET | args | caller frame |
2104// ^ esp,ebp
2105
2106// --{ pop ebp }----------------------------------------------------------------
2107// | RET | args | caller frame |
2108// ^ esp ^ ebp
2109
2110
// Emits the function prologue: frame construction (when a frame is needed),
// the OSR entry point (when compiling for on-stack replacement), stack-slot
// allocation, and callee-saved register spills.
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      // C calling convention: plain ebp-based frame.
      __ push(ebp);
      __ mov(ebp, esp);
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      __ StubPrologue(info()->GetOutputStackFrameType());
    }
  }
  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // The unoptimized frame's slots are already allocated; only the delta
    // needs to be reserved below.
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();

    // Initialize FPU state: reset the x87 stack and load 1.0 so there is a
    // known value on top of the register stack at the OSR entry.
    __ fninit();
    __ fld1();
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (stack_shrink_slots > 0) {
    __ sub(esp, Immediate(stack_shrink_slots * kPointerSize));
  }

  if (saves != 0) {  // Save callee-saved registers.
    DCHECK(!info()->is_osr());
    int pushed = 0;
    // Push from the highest register code down so that AssembleReturn's
    // low-to-high pop sequence restores them in LIFO order.
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      __ push(Register::from_code(i));
      ++pushed;
    }
    frame()->AllocateSavedCalleeRegisterSlots(pushed);
  }
}
2157
2158
2159void CodeGenerator::AssembleReturn() {
2160 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
2161
2162 // Clear the FPU stack only if there is no return value in the stack.
2163 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
2164 __ VerifyX87StackDepth(1);
2165 }
2166 bool clear_stack = true;
2167 for (int i = 0; i < descriptor->ReturnCount(); i++) {
2168 MachineRepresentation rep = descriptor->GetReturnType(i).representation();
2169 LinkageLocation loc = descriptor->GetReturnLocation(i);
2170 if (IsFloatingPoint(rep) && loc == LinkageLocation::ForRegister(0)) {
2171 clear_stack = false;
2172 break;
2173 }
2174 }
2175 if (clear_stack) __ fstp(0);
2176
2177 int pop_count = static_cast<int>(descriptor->StackParameterCount());
2178 const RegList saves = descriptor->CalleeSavedRegisters();
2179 // Restore registers.
2180 if (saves != 0) {
2181 for (int i = 0; i < Register::kNumRegisters; i++) {
2182 if (!((1 << i) & saves)) continue;
2183 __ pop(Register::from_code(i));
2184 }
2185 }
2186
2187 if (descriptor->IsCFunctionCall()) {
Ben Murdochda12d292016-06-02 14:46:10 +01002188 AssembleDeconstructFrame();
2189 } else if (frame_access_state()->has_frame()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002190 // Canonicalize JSFunction return sites for now.
2191 if (return_label_.is_bound()) {
2192 __ jmp(&return_label_);
2193 return;
2194 } else {
2195 __ bind(&return_label_);
Ben Murdochda12d292016-06-02 14:46:10 +01002196 AssembleDeconstructFrame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197 }
2198 }
2199 if (pop_count == 0) {
2200 __ ret(0);
2201 } else {
2202 __ Ret(pop_count * kPointerSize, ebx);
2203 }
2204}
2205
2206
// Emits a move between two instruction operands (register, stack slot,
// constant, or x87 double register). Dispatches on the source/destination
// kinds; not every combination is possible and unsupported ones are
// unreachable by construction of the register allocator.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X87OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    Operand dst = g.ToOperand(destination);
    __ mov(dst, src);
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mov(dst, src);
    } else {
      // Memory-to-memory move: ia32 has no mem->mem mov, so go through the
      // stack with push/pop.
      Operand dst = g.ToOperand(destination);
      __ push(src);
      __ pop(dst);
    }
  } else if (source->IsConstant()) {
    Constant src_constant = g.ToConstant(source);
    if (src_constant.type() == Constant::kHeapObject) {
      Handle<HeapObject> src = src_constant.ToHeapObject();
      int slot;
      if (IsMaterializableFromFrame(src, &slot)) {
        // The object is already spilled in the current frame; reload it from
        // its slot instead of re-materializing the handle.
        if (destination->IsRegister()) {
          Register dst = g.ToRegister(destination);
          __ mov(dst, g.SlotToOperand(slot));
        } else {
          DCHECK(destination->IsStackSlot());
          Operand dst = g.ToOperand(destination);
          __ push(g.SlotToOperand(slot));
          __ pop(dst);
        }
      } else if (destination->IsRegister()) {
        Register dst = g.ToRegister(destination);
        __ LoadHeapObject(dst, src);
      } else {
        DCHECK(destination->IsStackSlot());
        Operand dst = g.ToOperand(destination);
        AllowDeferredHandleDereference embedding_raw_address;
        if (isolate()->heap()->InNewSpace(*src)) {
          // New-space objects can move; PushHeapObject emits a reloc'd push.
          __ PushHeapObject(src);
          __ pop(dst);
        } else {
          __ mov(dst, src);
        }
      }
    } else if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (destination->IsStackSlot()) {
      Operand dst = g.ToOperand(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (src_constant.type() == Constant::kFloat32) {
      // TODO(turbofan): Can we do better here?
      // Materialize the float bits via a temporary stack slot, since the x87
      // unit can only load floating-point values from memory.
      uint32_t src = bit_cast<uint32_t>(src_constant.ToFloat32());
      if (destination->IsDoubleRegister()) {
        __ sub(esp, Immediate(kInt32Size));
        __ mov(MemOperand(esp, 0), Immediate(src));
        // always only push one value into the x87 stack.
        __ fstp(0);
        __ fld_s(MemOperand(esp, 0));
        __ add(esp, Immediate(kInt32Size));
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        Operand dst = g.ToOperand(destination);
        __ Move(dst, Immediate(src));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src_constant.type());
      // Split the 64-bit double into two 32-bit halves for the stores below.
      uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (destination->IsDoubleRegister()) {
        __ sub(esp, Immediate(kDoubleSize));
        __ mov(MemOperand(esp, 0), Immediate(lower));
        __ mov(MemOperand(esp, kInt32Size), Immediate(upper));
        // always only push one value into the x87 stack.
        __ fstp(0);
        __ fld_d(MemOperand(esp, 0));
        __ add(esp, Immediate(kDoubleSize));
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        Operand dst0 = g.ToOperand(destination);
        Operand dst1 = g.HighOperand(destination);
        __ Move(dst0, Immediate(lower));
        __ Move(dst1, Immediate(upper));
      }
    }
  } else if (source->IsDoubleRegister()) {
    // The x87 register allocation keeps the value on top of the FPU stack;
    // store it (without popping) into the destination slot.
    DCHECK(destination->IsDoubleStackSlot());
    Operand dst = g.ToOperand(destination);
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fst_s(dst);
        break;
      case MachineRepresentation::kFloat64:
        __ fst_d(dst);
        break;
      default:
        UNREACHABLE();
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    Operand src = g.ToOperand(source);
    auto allocated = AllocatedOperand::cast(*source);
    if (destination->IsDoubleRegister()) {
      // always only push one value into the x87 stack.
      __ fstp(0);
      switch (allocated.representation()) {
        case MachineRepresentation::kFloat32:
          __ fld_s(src);
          break;
        case MachineRepresentation::kFloat64:
          __ fld_d(src);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      // Slot-to-slot double move: load onto the FPU stack, then store+pop
      // into the destination.
      Operand dst = g.ToOperand(destination);
      switch (allocated.representation()) {
        case MachineRepresentation::kFloat32:
          __ fld_s(src);
          __ fstp_s(dst);
          break;
        case MachineRepresentation::kFloat64:
          __ fld_d(src);
          __ fstp_d(dst);
          break;
        default:
          UNREACHABLE();
      }
    }
  } else {
    UNREACHABLE();
  }
}
2349
2350
// Emits a swap of the contents of two instruction operands. As with
// AssembleMove, dispatch is on the operand kinds and only the combinations
// the register allocator can produce are handled.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X87OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = g.ToRegister(source);
    Register dst = g.ToRegister(destination);
    __ xchg(dst, src);
  } else if (source->IsRegister() && destination->IsStackSlot()) {
    // Register-memory.
    __ xchg(g.ToRegister(source), g.ToOperand(destination));
  } else if (source->IsStackSlot() && destination->IsStackSlot()) {
    // Memory-memory: push both values, then pop them into the opposite
    // slots. The operands are recomputed after each SP change because
    // esp-relative offsets shift with the pushes/pops; IncreaseSPDelta keeps
    // the frame access state in sync.
    Operand dst1 = g.ToOperand(destination);
    __ push(dst1);
    frame_access_state()->IncreaseSPDelta(1);
    Operand src1 = g.ToOperand(source);
    __ push(src1);
    Operand dst2 = g.ToOperand(destination);
    __ pop(dst2);
    frame_access_state()->IncreaseSPDelta(-1);
    Operand src2 = g.ToOperand(source);
    __ pop(src2);
  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
    // Only one x87 double register is ever allocated, so two double
    // registers can never need swapping.
    UNREACHABLE();
  } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) {
    // Load the slot value on top of the FPU stack, exchange it with the
    // register value (now at st(1)), then store+pop back into the slot.
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fld_s(g.ToOperand(destination));
        __ fxch();
        __ fstp_s(g.ToOperand(destination));
        break;
      case MachineRepresentation::kFloat64:
        __ fld_d(g.ToOperand(destination));
        __ fxch();
        __ fstp_d(g.ToOperand(destination));
        break;
      default:
        UNREACHABLE();
    }
  } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) {
    // Slot-slot double swap: load both values, then store them back in
    // crossed order (stores pop in reverse of the loads).
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fld_s(g.ToOperand(source));
        __ fld_s(g.ToOperand(destination));
        __ fstp_s(g.ToOperand(source));
        __ fstp_s(g.ToOperand(destination));
        break;
      case MachineRepresentation::kFloat64:
        __ fld_d(g.ToOperand(source));
        __ fld_d(g.ToOperand(destination));
        __ fstp_d(g.ToOperand(source));
        __ fstp_d(g.ToOperand(destination));
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
2417
2418
2419void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2420 for (size_t index = 0; index < target_count; ++index) {
2421 __ dd(targets[index]);
2422 }
2423}
2424
2425
2426void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
2427
2428
2429void CodeGenerator::EnsureSpaceForLazyDeopt() {
2430 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2431 return;
2432 }
2433
2434 int space_needed = Deoptimizer::patch_size();
2435 // Ensure that we have enough space after the previous lazy-bailout
2436 // instruction for patching the code here.
2437 int current_pc = masm()->pc_offset();
2438 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2439 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2440 __ Nop(padding_size);
2441 }
2442}
2443
2444#undef __
2445
2446} // namespace compiler
2447} // namespace internal
2448} // namespace v8