blob: ffc236dd798838a670360756908662cff1ae1cc5 [file] [log] [blame]
Ben Murdoch086aeea2011-05-13 15:57:08 +01001// Copyright 2011 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_LITHIUM_H_
29#define V8_LITHIUM_H_
30
Ben Murdoch257744e2011-11-30 15:57:28 +000031#include "allocation.h"
Ben Murdochb8e0da22011-05-16 14:20:40 +010032#include "hydrogen.h"
Ben Murdochb8e0da22011-05-16 14:20:40 +010033#include "safepoint-table.h"
Ben Murdoch086aeea2011-05-13 15:57:08 +010034
35namespace v8 {
36namespace internal {
37
// Base class for all lithium (low-level IR) operands.  An operand is a
// single unsigned word: the low kKindFieldWidth bits hold the Kind tag
// and the remaining upper bits hold a signed index whose meaning depends
// on the kind (register code, stack-slot index, argument index, ...).
class LOperand: public ZoneObject {
 public:
  // Kind tag; 8 values fit exactly in the 3-bit KindField.
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  // Casting to int before shifting sign-extends, so negative indices
  // stored by ConvertTo round-trip correctly.
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  bool IsStackSlot() const { return kind() == STACK_SLOT; }
  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
  bool IsRegister() const { return kind() == REGISTER; }
  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
  bool IsArgument() const { return kind() == ARGUMENT; }
  bool IsUnallocated() const { return kind() == UNALLOCATED; }
  // Compares the full encoded word, i.e. both kind and index.
  bool Equals(LOperand* other) const { return value_ == other->value_; }
  int VirtualRegister();

  void PrintTo(StringStream* stream);
  // Re-encodes this operand in place; the ASSERT catches indices that do
  // not survive the shift (out of representable range).
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  // Packed representation: [ index (signed) | kind:3 ].
  unsigned value_;
};
80
81
// An as-yet unallocated operand together with the constraints ("policy")
// the register allocator must satisfy when assigning it a location.
// Bit layout on top of LOperand::value_, low bits first:
//   [ kind:3 | policy:4 | lifetime:1 | virtual register:17 | fixed index:7 ]
class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT,
    IGNORE
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at
    // instruction start. Register allocator is free to assign the same register
    // to some other operand used inside instruction (i.e. temporary or
    // output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of
    // instruction. This means that register allocator will not reuse it's
    // register for any other operand inside instruction.
    USED_AT_END
  };

  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // The superclass has a KindField. Some policies have a signed fixed
  // index in the upper bits.
  static const int kPolicyWidth = 4;
  static const int kLifetimeWidth = 1;
  static const int kVirtualRegisterWidth = 17;

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

  // NOTE(review): this permits ids up to 1 << 18, but VirtualRegisterField
  // is only kVirtualRegisterWidth (17) bits wide, so ids >= (1 << 17)
  // would overflow into the fixed-index bits -- confirm against the
  // register allocator's actual limit.
  static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
  // The fixed index occupies the remaining 32 - kFixedIndexShift = 7 top
  // bits as a signed value, hence the [-64, 63] range.
  static const int kMaxFixedIndex = 63;
  static const int kMinFixedIndex = -64;

  bool HasIgnorePolicy() const { return policy() == IGNORE; }
  bool HasNoPolicy() const { return policy() == NONE; }
  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
    value_ &= ~PolicyField::mask();
    value_ |= PolicyField::encode(policy);
  }
  // Arithmetic shift of the signed representation recovers the sign of
  // negative fixed indices.
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

  unsigned virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
    value_ &= ~VirtualRegisterField::mask();
    value_ |= VirtualRegisterField::encode(id);
  }

  // Returns a fresh unallocated operand carrying the same virtual
  // register but no constraints (ANY policy).
  LUnallocated* CopyUnconstrained() {
    LUnallocated* result = new LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  // Checked downcast; valid only for operands of kind UNALLOCATED.
  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  // value_ already holds the UNALLOCATED kind tag from the LOperand
  // constructor; OR in the remaining fields.  The ASSERT catches fixed
  // indices outside the representable [kMinFixedIndex, kMaxFixedIndex].
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    ASSERT(this->fixed_index() == fixed_index);
  }
};
207
208
209class LMoveOperands BASE_EMBEDDED {
210 public:
211 LMoveOperands(LOperand* source, LOperand* destination)
212 : source_(source), destination_(destination) {
213 }
214
215 LOperand* source() const { return source_; }
216 void set_source(LOperand* operand) { source_ = operand; }
217
218 LOperand* destination() const { return destination_; }
219 void set_destination(LOperand* operand) { destination_ = operand; }
220
221 // The gap resolver marks moves as "in-progress" by clearing the
222 // destination (but not the source).
223 bool IsPending() const {
224 return destination_ == NULL && source_ != NULL;
225 }
226
227 // True if this move a move into the given destination operand.
228 bool Blocks(LOperand* operand) const {
229 return !IsEliminated() && source()->Equals(operand);
230 }
231
232 // A move is redundant if it's been eliminated, if its source and
233 // destination are the same, or if its destination is unneeded.
234 bool IsRedundant() const {
235 return IsEliminated() || source_->Equals(destination_) || IsIgnored();
236 }
237
238 bool IsIgnored() const {
239 return destination_ != NULL &&
240 destination_->IsUnallocated() &&
241 LUnallocated::cast(destination_)->HasIgnorePolicy();
242 }
243
244 // We clear both operands to indicate move that's been eliminated.
245 void Eliminate() { source_ = destination_ = NULL; }
246 bool IsEliminated() const {
247 ASSERT(source_ != NULL || destination_ == NULL);
248 return source_ == NULL;
249 }
250
251 private:
252 LOperand* source_;
253 LOperand* destination_;
254};
255
256
257class LConstantOperand: public LOperand {
258 public:
259 static LConstantOperand* Create(int index) {
260 ASSERT(index >= 0);
261 if (index < kNumCachedOperands) return &cache[index];
262 return new LConstantOperand(index);
263 }
264
265 static LConstantOperand* cast(LOperand* op) {
266 ASSERT(op->IsConstantOperand());
267 return reinterpret_cast<LConstantOperand*>(op);
268 }
269
270 static void SetupCache();
271
272 private:
273 static const int kNumCachedOperands = 128;
274 static LConstantOperand cache[];
275
276 LConstantOperand() : LOperand() { }
277 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
278};
279
280
// Operand identifying an argument by its index.
class LArgument: public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  // Checked downcast; valid only for operands of kind ARGUMENT.
  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};
290
291
292class LStackSlot: public LOperand {
293 public:
294 static LStackSlot* Create(int index) {
295 ASSERT(index >= 0);
296 if (index < kNumCachedOperands) return &cache[index];
297 return new LStackSlot(index);
298 }
299
300 static LStackSlot* cast(LOperand* op) {
301 ASSERT(op->IsStackSlot());
302 return reinterpret_cast<LStackSlot*>(op);
303 }
304
305 static void SetupCache();
306
307 private:
308 static const int kNumCachedOperands = 128;
309 static LStackSlot cache[];
310
311 LStackSlot() : LOperand() { }
312 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
313};
314
315
316class LDoubleStackSlot: public LOperand {
317 public:
318 static LDoubleStackSlot* Create(int index) {
319 ASSERT(index >= 0);
320 if (index < kNumCachedOperands) return &cache[index];
321 return new LDoubleStackSlot(index);
322 }
323
324 static LDoubleStackSlot* cast(LOperand* op) {
325 ASSERT(op->IsStackSlot());
326 return reinterpret_cast<LDoubleStackSlot*>(op);
327 }
328
329 static void SetupCache();
330
331 private:
332 static const int kNumCachedOperands = 128;
333 static LDoubleStackSlot cache[];
334
335 LDoubleStackSlot() : LOperand() { }
336 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
337};
338
339
340class LRegister: public LOperand {
341 public:
342 static LRegister* Create(int index) {
343 ASSERT(index >= 0);
344 if (index < kNumCachedOperands) return &cache[index];
345 return new LRegister(index);
346 }
347
348 static LRegister* cast(LOperand* op) {
349 ASSERT(op->IsRegister());
350 return reinterpret_cast<LRegister*>(op);
351 }
352
353 static void SetupCache();
354
355 private:
356 static const int kNumCachedOperands = 16;
357 static LRegister cache[];
358
359 LRegister() : LOperand() { }
360 explicit LRegister(int index) : LOperand(REGISTER, index) { }
361};
362
363
364class LDoubleRegister: public LOperand {
365 public:
366 static LDoubleRegister* Create(int index) {
367 ASSERT(index >= 0);
368 if (index < kNumCachedOperands) return &cache[index];
369 return new LDoubleRegister(index);
370 }
371
372 static LDoubleRegister* cast(LOperand* op) {
373 ASSERT(op->IsDoubleRegister());
374 return reinterpret_cast<LDoubleRegister*>(op);
375 }
376
377 static void SetupCache();
378
379 private:
380 static const int kNumCachedOperands = 16;
381 static LDoubleRegister cache[];
382
383 LDoubleRegister() : LOperand() { }
384 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
385};
386
387
Ben Murdochb8e0da22011-05-16 14:20:40 +0100388class LParallelMove : public ZoneObject {
Ben Murdoch086aeea2011-05-13 15:57:08 +0100389 public:
Ben Murdochb8e0da22011-05-16 14:20:40 +0100390 LParallelMove() : move_operands_(4) { }
391
392 void AddMove(LOperand* from, LOperand* to) {
393 move_operands_.Add(LMoveOperands(from, to));
394 }
395
396 bool IsRedundant() const;
397
398 const ZoneList<LMoveOperands>* move_operands() const {
399 return &move_operands_;
400 }
401
402 void PrintDataTo(StringStream* stream) const;
Ben Murdoch086aeea2011-05-13 15:57:08 +0100403
404 private:
Ben Murdochb8e0da22011-05-16 14:20:40 +0100405 ZoneList<LMoveOperands> move_operands_;
Ben Murdoch086aeea2011-05-13 15:57:08 +0100406};
407
408
// The set of operands recorded (via RecordPointer) as holding pointer
// values at a given position.
class LPointerMap: public ZoneObject {
 public:
  explicit LPointerMap(int position)
      : pointer_operands_(8), position_(position), lithium_position_(-1) { }

  const ZoneList<LOperand*>* operands() const { return &pointer_operands_; }
  int position() const { return position_; }
  // Position in the lithium instruction stream; -1 until assigned.
  int lithium_position() const { return lithium_position_; }

  // May be assigned exactly once (enforced by the ASSERT).
  void set_lithium_position(int pos) {
    ASSERT(lithium_position_ == -1);
    lithium_position_ = pos;
  }

  void RecordPointer(LOperand* op);
  void PrintTo(StringStream* stream);

 private:
  ZoneList<LOperand*> pointer_operands_;
  int position_;
  int lithium_position_;
};
431
432
// Frame state captured for deoptimization: the closure, parameter and
// expression-stack values (with their representations) at a given AST
// position, plus a link to the enclosing frame's environment.
class LEnvironment: public ZoneObject {
 public:
  LEnvironment(Handle<JSFunction> closure,
               int ast_id,
               int parameter_count,
               int argument_count,
               int value_count,
               LEnvironment* outer)
      : closure_(closure),
        arguments_stack_height_(argument_count),
        deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
        translation_index_(-1),
        ast_id_(ast_id),
        parameter_count_(parameter_count),
        values_(value_count),
        representations_(value_count),
        spilled_registers_(NULL),
        spilled_double_registers_(NULL),
        outer_(outer) {
  }

  Handle<JSFunction> closure() const { return closure_; }
  int arguments_stack_height() const { return arguments_stack_height_; }
  int deoptimization_index() const { return deoptimization_index_; }
  int translation_index() const { return translation_index_; }
  int ast_id() const { return ast_id_; }
  int parameter_count() const { return parameter_count_; }
  LOperand** spilled_registers() const { return spilled_registers_; }
  LOperand** spilled_double_registers() const {
    return spilled_double_registers_;
  }
  const ZoneList<LOperand*>* values() const { return &values_; }
  LEnvironment* outer() const { return outer_; }

  // Operands and their representations are stored pairwise in two
  // parallel lists (values_ and representations_).
  void AddValue(LOperand* operand, Representation representation) {
    values_.Add(operand);
    representations_.Add(representation);
  }

  bool HasTaggedValueAt(int index) const {
    return representations_[index].IsTagged();
  }

  // Records where this environment landed in the deoptimization data;
  // the ASSERT enforces that registration happens at most once.
  void Register(int deoptimization_index, int translation_index) {
    ASSERT(!HasBeenRegistered());
    deoptimization_index_ = deoptimization_index;
    translation_index_ = translation_index;
  }
  bool HasBeenRegistered() const {
    return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
  }

  void SetSpilledRegisters(LOperand** registers,
                           LOperand** double_registers) {
    spilled_registers_ = registers;
    spilled_double_registers_ = double_registers;
  }

  void PrintTo(StringStream* stream);

 private:
  Handle<JSFunction> closure_;
  int arguments_stack_height_;
  int deoptimization_index_;
  int translation_index_;
  int ast_id_;
  int parameter_count_;
  ZoneList<LOperand*> values_;
  ZoneList<Representation> representations_;

  // Allocation index indexed arrays of spill slot operands for registers
  // that are also in spill slots at an OSR entry. NULL for environments
  // that do not correspond to an OSR entry.
  LOperand** spilled_registers_;
  LOperand** spilled_double_registers_;

  // Environment of the enclosing frame, or NULL for the outermost one.
  LEnvironment* outer_;

  friend class LCodegen;
};
513
Steve Block1e0659c2011-05-24 12:43:12 +0100514
515// Iterates over the non-null, non-constant operands in an environment.
516class ShallowIterator BASE_EMBEDDED {
517 public:
518 explicit ShallowIterator(LEnvironment* env)
519 : env_(env),
520 limit_(env != NULL ? env->values()->length() : 0),
521 current_(0) {
522 current_ = AdvanceToNext(0);
523 }
524
525 inline bool HasNext() {
526 return env_ != NULL && current_ < limit_;
527 }
528
529 inline LOperand* Next() {
530 ASSERT(HasNext());
531 return env_->values()->at(current_);
532 }
533
534 inline void Advance() {
535 current_ = AdvanceToNext(current_ + 1);
536 }
537
538 inline LEnvironment* env() { return env_; }
539
540 private:
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100541 inline bool ShouldSkip(LOperand* op) {
542 return op == NULL || op->IsConstantOperand() || op->IsArgument();
543 }
544
Steve Block1e0659c2011-05-24 12:43:12 +0100545 inline int AdvanceToNext(int start) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100546 while (start < limit_ && ShouldSkip(env_->values()->at(start))) {
Steve Block1e0659c2011-05-24 12:43:12 +0100547 start++;
548 }
549 return start;
550 }
551
552 LEnvironment* env_;
553 int limit_;
554 int current_;
555};
556
557
// Iterator for non-null, non-constant operands, including those of all
// outer environments in the chain.
class DeepIterator BASE_EMBEDDED {
 public:
  explicit DeepIterator(LEnvironment* env)
      : current_iterator_(env) { }

  // NOTE: has a side effect -- when the current environment is
  // exhausted, this advances to the outer one before answering.
  // NOTE(review): it steps only ONE level per call; if an intermediate
  // environment has no interesting operands but a non-NULL outer, a
  // single HasNext() returns false prematurely -- confirm callers never
  // hit that case.
  inline bool HasNext() {
    if (current_iterator_.HasNext()) return true;
    if (current_iterator_.env() == NULL) return false;
    AdvanceToOuter();
    return current_iterator_.HasNext();
  }

  inline LOperand* Next() {
    ASSERT(current_iterator_.HasNext());
    return current_iterator_.Next();
  }

  inline void Advance() {
    if (current_iterator_.HasNext()) {
      current_iterator_.Advance();
    } else {
      AdvanceToOuter();
    }
  }

 private:
  // Replaces the shallow iterator with one over the outer environment.
  // Precondition: current_iterator_.env() != NULL.
  inline void AdvanceToOuter() {
    current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
  }

  ShallowIterator current_iterator_;
};
591
Ben Murdoch257744e2011-11-30 15:57:28 +0000592
593int ExternalArrayTypeToShiftSize(ExternalArrayType type);
594
595
Ben Murdoch086aeea2011-05-13 15:57:08 +0100596} } // namespace v8::internal
597
598#endif // V8_LITHIUM_H_