// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/mips/lithium-gap-resolver-mips.h"
#include "src/crankshaft/mips/lithium-mips.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

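  // Frame setup policy: ordinary (non-stub) code always builds its frame
  // eagerly in the prologue; a stub may run frameless, and a stub that only
  // calls out on deferred (slow) paths builds its frame lazily in the
  // deferred code itself.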
  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  LanguageMode language_mode() const { return info()->language_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
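  // Emits a conditional deoptimization check: if `condition` holds for `src1`
  // compared against `src2`, execution bails out of the optimized code using
  // the environment registered for `instr`.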
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(
      Condition condition, LInstruction* instr,
      Deoptimizer::DeoptReason deopt_reason = Deoptimizer::kNoReason,
      Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template <class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template <class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
                      const Operand& src2);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
                       const Operand& src2);
  template <class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies the input register.
  // Returns the condition on which a final split into true and false labels
  // should be made, to optimize fallthrough. Returns two registers in cmp1
  // and cmp2 that can be used in the Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split into true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  // Emits optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

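  // Moves the type feedback vector and slot index into the registers expected
  // by the vector load/store IC before the IC is called.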
  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

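  // RAII helper: saves the register state on entry (StoreRegistersStateStub)
  // and restores it on exit (RestoreRegistersStateStub), so that safepoints
  // recorded inside the scope are of kind Safepoint::kWithRegisters.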
  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};

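// Base class for code emitted out of line, after the main instruction stream.
// A deferred code object registers itself with the code generator on
// construction; its Generate() body is emitted later by GenerateDeferredCode(),
// entered via entry() and left via exit().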
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_