// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_
#define V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_

#include "src/ast/scopes.h"
#include "src/base/logging.h"
#include "src/crankshaft/ia32/lithium-gap-resolver-ia32.h"
#include "src/crankshaft/ia32/lithium-ia32.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

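  // Whether a stack frame must be set up eagerly in the prologue. Non-stub
  // code always builds one; stubs without stack slots or non-deferred calls
  // may instead build a frame lazily around deferred code (see
  // NeedsDeferredFrame below).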
  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

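  // Whether the 32-bit value being tagged into a number should be
  // interpreted as signed or unsigned.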
  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  Scope* scope() const { return scope_; }

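  // Scratch double register (xmm0 on this port).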
  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

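  // Stack-frame geometry, delegated to the LChunk: spill slots allocated by
  // the register allocator plus the fixed frame slots.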
  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. The bool-returning passes return true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  void GenerateBodyInstructionPost(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

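  // How a safepoint should be recorded for a call: a plain safepoint, or one
  // taken while all registers are saved in the safepoint register area.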
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

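  // Call helpers. Each one records a safepoint for the call so that the
  // deoptimizer can reconstruct the frame state afterwards.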
  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
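  // Emit a conditional deoptimization: if cc holds, control leaves the
  // optimized code through a bailout recorded for deopt_reason.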
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

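  // Compute the memory operand for an element access into a fast elements
  // backing store, given the key representation and elements kind.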
  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input, Register temp,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
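  // Keyed load/store helpers; DoLoadKeyed and DoStoreKeyed dispatch to one
  // of these based on the elements kind of the access.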
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  void EmitReturn(LReturn* instr);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  friend class LGapResolver;

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

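  // Jump table for deoptimization bailouts, emitted at the end of the
  // generated code (see GenerateJumpTable).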
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

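  // RAII helper: saves all registers to the safepoint register area on
  // entry and restores them on exit, so that values live in registers can
  // be described by safepoints recorded for the enclosed call.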
  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      DCHECK(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


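// A piece of deferred (out-of-line) code. Subclasses implement Generate();
// the code is emitted after the main instruction body, with the entry() and
// exit() labels linking it to the inline fast path.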
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_