// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_X87_LITHIUM_CODEGEN_X87_H_
#define V8_CRANKSHAFT_X87_LITHIUM_CODEGEN_X87_H_

#include <map>

#include "src/ast/scopes.h"
#include "src/base/logging.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/x87/lithium-gap-resolver-x87.h"
#include "src/crankshaft/x87/lithium-x87.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;
27class LCodeGen: public LCodeGenBase {
28 public:
29 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
30 : LCodeGenBase(chunk, assembler, info),
31 jump_table_(4, info->zone()),
32 scope_(info->scope()),
33 deferred_(8, info->zone()),
34 dynamic_frame_alignment_(false),
35 support_aligned_spilled_doubles_(false),
36 frame_is_built_(false),
37 x87_stack_(assembler),
38 safepoints_(info->zone()),
39 resolver_(this),
40 expected_safepoint_kind_(Safepoint::kSimple) {
41 PopulateDeoptimizationLiteralsWithInlinedFunctions();
42 }
43
44 int LookupDestination(int block_id) const {
45 return chunk()->LookupDestination(block_id);
46 }
47
48 bool IsNextEmittedBlock(int block_id) const {
49 return LookupDestination(block_id) == GetNextEmittedBlock();
50 }
51
52 bool NeedsEagerFrame() const {
Ben Murdoch097c5b22016-05-18 11:27:45 +010053 return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
54 !info()->IsStub() || info()->requires_frame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000055 }
56 bool NeedsDeferredFrame() const {
57 return !NeedsEagerFrame() && info()->is_deferred_calling();
58 }
59
60 // Support for converting LOperands to assembler types.
61 Operand ToOperand(LOperand* op) const;
62 Register ToRegister(LOperand* op) const;
63 X87Register ToX87Register(LOperand* op) const;
64
65 bool IsInteger32(LConstantOperand* op) const;
66 bool IsSmi(LConstantOperand* op) const;
67 Immediate ToImmediate(LOperand* op, const Representation& r) const {
68 return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
69 }
70 double ToDouble(LConstantOperand* op) const;
71
72 // Support for non-sse2 (x87) floating point stack handling.
73 // These functions maintain the mapping of physical stack registers to our
74 // virtual registers between instructions.
75 enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };
76
77 void X87Mov(X87Register reg, Operand src,
78 X87OperandType operand = kX87DoubleOperand);
79 void X87Mov(Operand src, X87Register reg,
80 X87OperandType operand = kX87DoubleOperand);
81 void X87Mov(X87Register reg, X87Register src,
82 X87OperandType operand = kX87DoubleOperand);
83
84 void X87PrepareBinaryOp(
85 X87Register left, X87Register right, X87Register result);
86
87 void X87LoadForUsage(X87Register reg);
88 void X87LoadForUsage(X87Register reg1, X87Register reg2);
89 void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
90 void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }
91
92 void X87Fxch(X87Register reg, int other_slot = 0) {
93 x87_stack_.Fxch(reg, other_slot);
94 }
95 void X87Free(X87Register reg) {
96 x87_stack_.Free(reg);
97 }
98
99
100 bool X87StackEmpty() {
101 return x87_stack_.depth() == 0;
102 }
103
104 Handle<Object> ToHandle(LConstantOperand* op) const;
105
106 // The operand denoting the second word (the one with a higher address) of
107 // a double stack slot.
108 Operand HighOperand(LOperand* op);
109
110 // Try to generate code for the entire chunk, but it may fail if the
111 // chunk contains constructs we cannot handle. Returns true if the
112 // code generation attempt succeeded.
113 bool GenerateCode();
114
115 // Finish the code by setting stack height, safepoint, and bailout
116 // information on it.
117 void FinishCode(Handle<Code> code);
118
119 // Deferred code support.
120 void DoDeferredNumberTagD(LNumberTagD* instr);
121
122 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
123 void DoDeferredNumberTagIU(LInstruction* instr,
124 LOperand* value,
125 LOperand* temp,
126 IntegerSignedness signedness);
127
128 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
129 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
130 void DoDeferredStackCheck(LStackCheck* instr);
131 void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
132 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
133 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
134 void DoDeferredAllocate(LAllocate* instr);
135 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
136 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
137 Register object,
138 Register index);
139
140 // Parallel move support.
141 void DoParallelMove(LParallelMove* move);
142 void DoGap(LGap* instr);
143
144 // Emit frame translation commands for an environment.
145 void WriteTranslation(LEnvironment* environment, Translation* translation);
146
147 void EnsureRelocSpaceForDeoptimization();
148
149 // Declare methods that deal with the individual node types.
150#define DECLARE_DO(type) void Do##type(L##type* node);
151 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
152#undef DECLARE_DO
153
154 private:
155 LanguageMode language_mode() const { return info()->language_mode(); }
156
157 Scope* scope() const { return scope_; }
158
159 void EmitClassOfTest(Label* if_true,
160 Label* if_false,
161 Handle<String> class_name,
162 Register input,
163 Register temporary,
164 Register temporary2);
165
Ben Murdoch097c5b22016-05-18 11:27:45 +0100166 bool HasAllocatedStackSlots() const {
167 return chunk()->HasAllocatedStackSlots();
168 }
169 int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
170 int GetTotalFrameSlotCount() const {
171 return chunk()->GetTotalFrameSlotCount();
172 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000173
174 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
175
176 // Code generation passes. Returns true if code generation should
177 // continue.
178 void GenerateBodyInstructionPre(LInstruction* instr) override;
179 void GenerateBodyInstructionPost(LInstruction* instr) override;
180 bool GeneratePrologue();
181 bool GenerateDeferredCode();
182 bool GenerateJumpTable();
183 bool GenerateSafepointTable();
184
185 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
186 void GenerateOsrPrologue();
187
188 enum SafepointMode {
189 RECORD_SIMPLE_SAFEPOINT,
190 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
191 };
192
193 void CallCode(Handle<Code> code,
194 RelocInfo::Mode mode,
195 LInstruction* instr);
196
197 void CallCodeGeneric(Handle<Code> code,
198 RelocInfo::Mode mode,
199 LInstruction* instr,
200 SafepointMode safepoint_mode);
201
202 void CallRuntime(const Runtime::Function* fun, int argc, LInstruction* instr,
203 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
204
205 void CallRuntime(Runtime::FunctionId id,
206 int argc,
207 LInstruction* instr) {
208 const Runtime::Function* function = Runtime::FunctionForId(id);
209 CallRuntime(function, argc, instr);
210 }
211
212 void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
213 const Runtime::Function* function = Runtime::FunctionForId(id);
214 CallRuntime(function, function->nargs, instr);
215 }
216
217 void CallRuntimeFromDeferred(Runtime::FunctionId id,
218 int argc,
219 LInstruction* instr,
220 LOperand* context);
221
222 void LoadContextFromDeferred(LOperand* context);
223
224 // Generate a direct call to a known function. Expects the function
225 // to be in edi.
226 void CallKnownFunction(Handle<JSFunction> function,
227 int formal_parameter_count, int arity,
228 LInstruction* instr);
229
230 void RecordSafepointWithLazyDeopt(LInstruction* instr,
231 SafepointMode safepoint_mode);
232
233 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
234 Safepoint::DeoptMode mode);
235 void DeoptimizeIf(Condition cc, LInstruction* instr,
236 Deoptimizer::DeoptReason deopt_reason,
237 Deoptimizer::BailoutType bailout_type);
238 void DeoptimizeIf(Condition cc, LInstruction* instr,
239 Deoptimizer::DeoptReason deopt_reason);
240
241 bool DeoptEveryNTimes() {
242 return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
243 }
244
245 void AddToTranslation(LEnvironment* environment,
246 Translation* translation,
247 LOperand* op,
248 bool is_tagged,
249 bool is_uint32,
250 int* object_index_pointer,
251 int* dematerialized_index_pointer);
252
253 Register ToRegister(int index) const;
254 X87Register ToX87Register(int index) const;
255 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
256 int32_t ToInteger32(LConstantOperand* op) const;
257 ExternalReference ToExternalReference(LConstantOperand* op) const;
258
259 Operand BuildFastArrayOperand(LOperand* elements_pointer,
260 LOperand* key,
261 Representation key_representation,
262 ElementsKind elements_kind,
263 uint32_t base_offset);
264
265 Operand BuildSeqStringOperand(Register string,
266 LOperand* index,
267 String::Encoding encoding);
268
269 void EmitIntegerMathAbs(LMathAbs* instr);
270
271 // Support for recording safepoint and position information.
272 void RecordSafepoint(LPointerMap* pointers,
273 Safepoint::Kind kind,
274 int arguments,
275 Safepoint::DeoptMode mode);
276 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
277 void RecordSafepoint(Safepoint::DeoptMode mode);
278 void RecordSafepointWithRegisters(LPointerMap* pointers,
279 int arguments,
280 Safepoint::DeoptMode mode);
281
282 void RecordAndWritePosition(int position) override;
283
284 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
285 void EmitGoto(int block);
286
287 // EmitBranch expects to be the last instruction of a block.
288 template<class InstrType>
289 void EmitBranch(InstrType instr, Condition cc);
290 template <class InstrType>
291 void EmitTrueBranch(InstrType instr, Condition cc);
292 template <class InstrType>
293 void EmitFalseBranch(InstrType instr, Condition cc);
294 void EmitNumberUntagDNoSSE2(LNumberUntagD* instr, Register input,
295 Register temp, X87Register res_reg,
296 NumberUntagDMode mode);
297
298 // Emits optimized code for typeof x == "y". Modifies input register.
299 // Returns the condition on which a final split to
300 // true and false label should be made, to optimize fallthrough.
301 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
302
303 // Emits optimized code for %_IsString(x). Preserves input register.
304 // Returns the condition on which a final split to
305 // true and false label should be made, to optimize fallthrough.
306 Condition EmitIsString(Register input,
307 Register temp1,
308 Label* is_not_string,
309 SmiCheck check_needed);
310
311 // Emits optimized code to deep-copy the contents of statically known
312 // object graphs (e.g. object literal boilerplate).
313 void EmitDeepCopy(Handle<JSObject> object,
314 Register result,
315 Register source,
316 int* offset,
317 AllocationSiteMode mode);
318
319 void EnsureSpaceForLazyDeopt(int space_needed) override;
320 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
321 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
322 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
323 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
324 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
325 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
326
327 template <class T>
328 void EmitVectorLoadICRegisters(T* instr);
329 template <class T>
330 void EmitVectorStoreICRegisters(T* instr);
331
332 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);
333
334 // Emits code for pushing either a tagged constant, a (non-double)
335 // register, or a stack slot operand.
336 void EmitPushTaggedOperand(LOperand* operand);
337
338 void X87Fld(Operand src, X87OperandType opts);
339
340 void EmitFlushX87ForDeopt();
341 void FlushX87StackIfNecessary(LInstruction* instr) {
342 x87_stack_.FlushIfNecessary(instr, this);
343 }
344 friend class LGapResolver;
345
346#ifdef _MSC_VER
347 // On windows, you may not access the stack more than one page below
348 // the most recently mapped page. To make the allocated area randomly
349 // accessible, we write an arbitrary value to each page in range
350 // esp + offset - page_size .. esp in turn.
351 void MakeSureStackPagesMapped(int offset);
352#endif
353
354 ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
355 Scope* const scope_;
356 ZoneList<LDeferredCode*> deferred_;
357 bool dynamic_frame_alignment_;
358 bool support_aligned_spilled_doubles_;
359 bool frame_is_built_;
360
361 class X87Stack : public ZoneObject {
362 public:
363 explicit X87Stack(MacroAssembler* masm)
364 : stack_depth_(0), is_mutable_(true), masm_(masm) { }
365 explicit X87Stack(const X87Stack& other)
366 : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(masm()) {
367 for (int i = 0; i < stack_depth_; i++) {
368 stack_[i] = other.stack_[i];
369 }
370 }
371 bool operator==(const X87Stack& other) const {
372 if (stack_depth_ != other.stack_depth_) return false;
373 for (int i = 0; i < stack_depth_; i++) {
374 if (!stack_[i].is(other.stack_[i])) return false;
375 }
376 return true;
377 }
378 X87Stack& operator=(const X87Stack& other) {
379 stack_depth_ = other.stack_depth_;
380 for (int i = 0; i < stack_depth_; i++) {
381 stack_[i] = other.stack_[i];
382 }
383 return *this;
384 }
385 bool Contains(X87Register reg);
386 void Fxch(X87Register reg, int other_slot = 0);
387 void Free(X87Register reg);
388 void PrepareToWrite(X87Register reg);
389 void CommitWrite(X87Register reg);
390 void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
391 void LeavingBlock(int current_block_id, LGoto* goto_instr, LCodeGen* cgen);
392 int depth() const { return stack_depth_; }
393 int GetLayout();
394 int st(X87Register reg) { return st2idx(ArrayIndex(reg)); }
395 void pop() {
396 DCHECK(is_mutable_);
397 USE(is_mutable_);
398 stack_depth_--;
399 }
400 void push(X87Register reg) {
401 DCHECK(is_mutable_);
402 DCHECK(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
403 stack_[stack_depth_] = reg;
404 stack_depth_++;
405 }
406
407 MacroAssembler* masm() const { return masm_; }
408 Isolate* isolate() const { return masm_->isolate(); }
409
410 private:
411 int ArrayIndex(X87Register reg);
412 int st2idx(int pos);
413
414 X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
415 int stack_depth_;
416 bool is_mutable_;
417 MacroAssembler* masm_;
418 };
419 X87Stack x87_stack_;
420 // block_id -> X87Stack*;
421 typedef std::map<int, X87Stack*> X87StackMap;
422 X87StackMap x87_stack_map_;
423
424 // Builder that keeps track of safepoints in the code. The table
425 // itself is emitted at the end of the generated code.
426 SafepointTableBuilder safepoints_;
427
428 // Compiler from a set of parallel moves to a sequential list of moves.
429 LGapResolver resolver_;
430
431 Safepoint::Kind expected_safepoint_kind_;
432
433 class PushSafepointRegistersScope final BASE_EMBEDDED {
434 public:
435 explicit PushSafepointRegistersScope(LCodeGen* codegen)
436 : codegen_(codegen) {
437 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
438 codegen_->masm_->PushSafepointRegisters();
439 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
440 DCHECK(codegen_->info()->is_calling());
441 }
442
443 ~PushSafepointRegistersScope() {
444 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
445 codegen_->masm_->PopSafepointRegisters();
446 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
447 }
448
449 private:
450 LCodeGen* codegen_;
451 };
452
453 friend class LDeferredCode;
454 friend class LEnvironment;
455 friend class SafepointGenerator;
456 friend class X87Stack;
457 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
458};


// Base class for code emitted out-of-line, after the main instruction
// stream. Registers itself with the codegen on construction (via
// AddDeferredCode); subclasses implement Generate() to emit the actual
// deferred code. Also snapshots the x87 stack layout at the point where
// the deferred code branches off the main stream.
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen, const LCodeGen::X87Stack& x87_stack)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_),
        x87_stack_(x87_stack) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  // Emits the deferred code. Implemented by subclasses.
  virtual void Generate() = 0;
  // The instruction this deferred code belongs to.
  virtual LInstruction* instr() = 0;

  // Overrides the default internal exit label with an external one.
  void SetExit(Label* exit) { external_exit_ = exit; }
  // Label at which the main instruction stream enters this deferred code.
  Label* entry() { return &entry_; }
  // Label to jump to when leaving the deferred code; the external exit
  // takes precedence when one has been set.
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  // Returns &done_ when a deferred frame is needed, otherwise exit().
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }
  // X87 stack layout captured at construction time.
  const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
  LCodeGen::X87Stack x87_stack_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_X87_LITHIUM_CODEGEN_X87_H_