// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/crankshaft/arm64/lithium-arm64.h"

#include "src/ast/scopes.h"
#include "src/crankshaft/arm64/lithium-gap-resolver-arm64.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        pushed_arguments_(0) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

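  // An eager frame is built up front in the prologue; code that needs a frame
  // only on deferred (out-of-line) paths builds it lazily there instead.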
  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32(LOperand* op);
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);

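  // JavaScript shift counts are taken modulo 32, so only the low five bits of
  // the constant are significant (hence the & 0x1f below).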
  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
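  // For example, DECLARE_DO(AddI) expands to 'void DoAddI(LAddI* node);'.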
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains logic to avoid emitting a
  // branch to the next emitted basic block, where we could just fall through.
  // Don't use it directly; prefer one of the helpers such as
  // LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which the final split into the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
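  // Emit a branch to the deoptimization entry for |instr|. The branch_type,
  // reg and bit arguments select the branch form (unconditional,
  // compare-and-branch, or test-and-branch), as for MacroAssembler::B.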
  void DeoptimizeBranch(LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason,
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LInstruction* instr, Deoptimizer::DeoptReason deopt_reason,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cond, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfZero(Register rt, LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfSmi(Register rt, LInstruction* instr,
                       Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
                        LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
                           LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
                             Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps. The Generate* steps return true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in x1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) override;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;

  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;
361
362 Safepoint::Kind expected_safepoint_kind_;
363
364 // The number of arguments pushed onto the stack, either by this block or by a
365 // predecessor.
366 int pushed_arguments_;
367
368 void RecordPushedArgumentsDelta(int delta) {
369 pushed_arguments_ += delta;
370 DCHECK(pushed_arguments_ >= 0);
371 }
372
373 int old_position_;
374
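  // While a PushSafepointRegistersScope is live, safepoints are recorded with
  // registers: the constructor spills the register state via a stub call and
  // the destructor restores it. A sketch of typical use in deferred code (the
  // runtime call shown is illustrative):
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     __ CallRuntimeSaveDoubles(...);
  //     RecordSafepointWithRegisters(instr->pointer_map(), 0,
  //                                  Safepoint::kNoLazyDeopt);
  //   }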
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call to
      // the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
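
// A typical subclass wraps one instruction and forwards Generate() to the
// matching DoDeferred* helper on LCodeGen. A minimal sketch (the real
// subclasses live in lithium-codegen-arm64.cc):
//
//   class DeferredStackCheck : public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
//     LInstruction* instr() override { return instr_; }
//    private:
//     LStackCheck* instr_;
//   };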


// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds, and EmitInverted() emits
// the branch when the inverted condition holds.
//
// For concrete examples, see the implementations in lithium-codegen-arm64.cc
// (e.g. BranchOnCondition, CompareAndBranch).
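//
// A minimal sketch of such an implementation (simplified from the .cc):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) {}
//     virtual void Emit(Label* label) const { __ B(cond_, label); }
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };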
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
      : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_