// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/crankshaft/arm64/lithium-arm64.h"

#include "src/ast/scopes.h"
#include "src/crankshaft/arm64/lithium-gap-resolver-arm64.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        pushed_arguments_(0) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

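  // Whether a stack frame must be built eagerly in the prologue, as opposed
  // to being built only when deferred code is entered (NeedsDeferredFrame).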
  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);
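  // A typical driver sequence (a sketch only; the real call site lives in
  // the Crankshaft pipeline, and `code` stands for the Handle<Code> being
  // finalized):
  //   LCodeGen codegen(chunk, assembler, info);
  //   if (codegen.GenerateCode()) {
  //     codegen.FinishCode(code);
  //   }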

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32(LOperand* op);
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);

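  // JavaScript shift counts are taken modulo 32 (per the ECMAScript shift
  // operator semantics, the count is converted with ToUint32 and masked with
  // 0x1f), so e.g. (x << 33) produces the same result as (x << 1). The mask
  // below applies that rule to a constant shift amount at compile time.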
  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
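  // For each instruction type in the list this expands to a declaration such
  // as (using a hypothetical list member Add as an example):
  //   void DoAdd(LAdd* node);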

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains some code to avoid emitting a
  // branch on the next emitted basic block where we could just fall through.
  // You shouldn't use it directly; consider one of the helpers instead, such
  // as LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);
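  // A minimal sketch of the expected delegation pattern, assuming a concrete
  // BranchOnCondition generator as implemented in lithium-codegen-arm64.cc:
  //   BranchOnCondition branch(this, condition);
  //   EmitBranchGeneric(instr, branch);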

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split into true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);
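  // A sketch of the intended call pattern (identifiers are illustrative):
  //   Condition is_string =
  //       EmitIsString(value, temp, instr->FalseLabel(chunk_), check_needed);
  //   EmitBranch(instr, is_string);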

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
  void DeoptimizeBranch(LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason,
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LInstruction* instr, Deoptimizer::DeoptReason deopt_reason,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cond, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfZero(Register rt, LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfSmi(Register rt, LInstruction* instr,
                       Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
                        LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
                           LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
                             Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);
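  // These helpers are typically invoked from Do* methods; a sketch, assuming
  // Deoptimizer::kSmi is among the generated DeoptReason values:
  //   DeoptimizeIfSmi(input, instr, Deoptimizer::kSmi);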

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps. Each bool-returning step returns true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function. Expects the function
  // to be in x1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         LInstruction* instr);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) override;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;

  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // The number of arguments pushed onto the stack, either by this block or by
  // a predecessor.
  int pushed_arguments_;

  void RecordPushedArgumentsDelta(int delta) {
    pushed_arguments_ += delta;
    DCHECK(pushed_arguments_ >= 0);
  }

  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr, which must be saved on the stack (the call
      // to the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
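  // A typical use inside LCodeGen (sketch): registers are saved by the
  // constructor and restored by the destructor, so any code that records a
  // safepoint with registers runs inside the scope:
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     ...  // emit calls that record Safepoint::kWithRegisters safepoints
  //   }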

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};


// This is the abstract class used by EmitBranchGeneric. It is used to emit
// code for conditional branching: Emit() emits code that branches when the
// condition holds, and EmitInverted() emits code that branches when the
// inverted condition holds.
//
// For actual examples of conditions, see the concrete implementations in
// lithium-codegen-arm64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
      : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
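
// A minimal concrete implementation sketch, along the lines of the
// BranchOnCondition generator in lithium-codegen-arm64.cc (member names here
// are illustrative, and __ abbreviates masm()-> as in that file):
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//     virtual void Emit(Label* label) const { __ B(cond_, label); }
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };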

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_