// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "v8.h"

#include "data-flow.h"
#include "zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LArgument;
class LChunk;
class LConstantOperand;
class LGap;
class LInstruction;
class LParallelMove;
class LPointerMap;
class LStackSlot;
class LRegister;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Returns the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep/2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs a lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};


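// Example (illustrative, derived from kStep == 2 above): the instruction
// with index 3 owns the two positions 6 and 7, so
//   LifetimePosition::FromInstructionIndex(3).Value() == 6
//   LifetimePosition::FromInstructionIndex(3).InstructionEnd().Value() == 7
//   LifetimePosition::FromInstructionIndex(3).NextInstruction().Value() == 8
// and both positions 6 and 7 report InstructionIndex() == 3.

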
enum RegisterKind {
  NONE,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  bool IsStackSlot() const { return kind() == STACK_SLOT; }
  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
  bool IsRegister() const { return kind() == REGISTER; }
  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
  bool IsArgument() const { return kind() == ARGUMENT; }
  bool IsUnallocated() const { return kind() == UNALLOCATED; }
  bool Equals(LOperand* other) const { return value_ == other->value_; }
  int VirtualRegister();

  void PrintTo(StringStream* stream);
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};


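// Illustrative note on the encoding above: the kind lives in the low
// kKindFieldWidth (3) bits and the index occupies the bits above it. For
// example, an operand converted with ConvertTo(STACK_SLOT, 5) reports
// kind() == STACK_SLOT and index() == 5. index() decodes with a signed
// shift, which lets operands carry a negative index on the usual
// two's-complement targets.

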
class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT,
    IGNORE
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // A USED_AT_START operand is guaranteed to be live only at the
    // instruction start. The register allocator is free to assign the same
    // register to some other operand used inside the instruction (i.e. a
    // temporary or the output).
    USED_AT_START,

    // A USED_AT_END operand is treated as live until the end of the
    // instruction. This means that the register allocator will not reuse its
    // register for any other operand inside the instruction.
    USED_AT_END
  };

  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // The superclass has a KindField. Some policies have a signed fixed
  // index in the upper bits.
  static const int kPolicyWidth = 4;
  static const int kLifetimeWidth = 1;
  static const int kVirtualRegisterWidth = 17;

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

  static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
  static const int kMaxFixedIndices = 128;

  bool HasIgnorePolicy() const { return policy() == IGNORE; }
  bool HasNoPolicy() const { return policy() == NONE; }
  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
    value_ &= ~PolicyField::mask();
    value_ |= PolicyField::encode(policy);
  }
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

  unsigned virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
    value_ &= ~VirtualRegisterField::mask();
    value_ |= VirtualRegisterField::encode(id);
  }

  LUnallocated* CopyUnconstrained() {
    LUnallocated* result = new LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    ASSERT(this->fixed_index() == fixed_index);
  }
};


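// Illustrative sketch of the resulting bit layout (derived from the widths
// and shifts above, on top of the 3-bit KindField from LOperand):
//   bits  0..2   kind (UNALLOCATED)
//   bits  3..6   policy
//   bit   7      lifetime
//   bits  8..24  virtual register
//   bits 25..31  signed fixed index
// So, for example, LUnallocated(LUnallocated::FIXED_REGISTER, 2) describes
// an operand that must be placed in the register with allocation index 2.

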
class LMoveOperands BASE_EMBEDDED {
 public:
  LMoveOperands(LOperand* from, LOperand* to) : from_(from), to_(to) { }

  LOperand* from() const { return from_; }
  LOperand* to() const { return to_; }
  bool IsRedundant() const {
    return IsEliminated() || from_->Equals(to_) || IsIgnored();
  }
  bool IsEliminated() const { return from_ == NULL; }
  bool IsIgnored() const {
    if (to_ != NULL && to_->IsUnallocated() &&
        LUnallocated::cast(to_)->HasIgnorePolicy()) {
      return true;
    }
    return false;
  }

  void Eliminate() { from_ = to_ = NULL; }

 private:
  LOperand* from_;
  LOperand* to_;
};


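// Illustrative note on IsRedundant() above: a move is treated as a no-op
// when its source and destination encode the same operand
// (from_->Equals(to_)), when its destination carries the IGNORE policy, or
// when it has already been eliminated by clearing both operands via
// Eliminate().

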
class LConstantOperand: public LOperand {
 public:
  static LConstantOperand* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LConstantOperand(index);
  }

  static LConstantOperand* cast(LOperand* op) {
    ASSERT(op->IsConstantOperand());
    return reinterpret_cast<LConstantOperand*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 128;
  static LConstantOperand cache[];

  LConstantOperand() : LOperand() { }
  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
};


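// Illustrative note on the caching pattern shared by LConstantOperand and by
// LStackSlot, LDoubleStackSlot, LRegister and LDoubleRegister below:
// Create(i) hands out a shared, statically allocated instance for small
// indices (i < kNumCachedOperands) and only heap-allocates for larger ones,
// so commonly used operands avoid allocation entirely. The cache array is
// presumably filled in by the corresponding SetupCache() call during VM
// initialization.

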
class LArgument: public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};


class LStackSlot: public LOperand {
 public:
  static LStackSlot* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LStackSlot(index);
  }

  static LStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LStackSlot*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 128;
  static LStackSlot cache[];

  LStackSlot() : LOperand() { }
  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
};


class LDoubleStackSlot: public LOperand {
 public:
  static LDoubleStackSlot* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleStackSlot(index);
  }

  static LDoubleStackSlot* cast(LOperand* op) {
    ASSERT(op->IsDoubleStackSlot());
    return reinterpret_cast<LDoubleStackSlot*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 128;
  static LDoubleStackSlot cache[];

  LDoubleStackSlot() : LOperand() { }
  explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
};


class LRegister: public LOperand {
 public:
  static LRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LRegister(index);
  }

  static LRegister* cast(LOperand* op) {
    ASSERT(op->IsRegister());
    return reinterpret_cast<LRegister*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 16;
  static LRegister cache[];

  LRegister() : LOperand() { }
  explicit LRegister(int index) : LOperand(REGISTER, index) { }
};


class LDoubleRegister: public LOperand {
 public:
  static LDoubleRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleRegister(index);
  }

  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

  static void SetupCache();

 private:
  static const int kNumCachedOperands = 16;
  static LDoubleRegister cache[];

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};


// A register-allocator view of a Lithium instruction. It contains the
// output operand and the lists of input and temporary operands.
class InstructionSummary: public ZoneObject {
 public:
  InstructionSummary()
      : output_operand_(NULL),
        input_count_(0),
        operands_(4),
        is_call_(false),
        is_save_doubles_(false) {}

  // Output operands.
  LOperand* Output() const { return output_operand_; }
  void SetOutput(LOperand* output) {
    ASSERT(output_operand_ == NULL);
    output_operand_ = output;
  }

  // Input operands.
  int InputCount() const { return input_count_; }
  LOperand* InputAt(int i) const {
    ASSERT(i < input_count_);
    return operands_[i];
  }
  void AddInput(LOperand* input) {
    operands_.InsertAt(input_count_, input);
    input_count_++;
  }

  // Temporary operands.
  int TempCount() const { return operands_.length() - input_count_; }
  LOperand* TempAt(int i) const { return operands_[i + input_count_]; }
  void AddTemp(LOperand* temp) { operands_.Add(temp); }

  void MarkAsCall() { is_call_ = true; }
  bool IsCall() const { return is_call_; }

  void MarkAsSaveDoubles() { is_save_doubles_ = true; }
  bool IsSaveDoubles() const { return is_save_doubles_; }

 private:
  LOperand* output_operand_;
  int input_count_;
  ZoneList<LOperand*> operands_;
  bool is_call_;
  bool is_save_doubles_;
};

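// Illustrative note on the operand layout above: inputs and temps share the
// operands_ list, with all inputs stored before all temps. For a summary
// with two inputs and one temp, operands_ holds [input0, input1, temp0],
// so InputAt(1) reads operands_[1] and TempAt(0) reads operands_[2].
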
// Representation of the non-empty interval [start,end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos);

  // If this interval intersects with the other, returns the smallest
  // position that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};

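// Illustrative examples of Intersect() (positions given as raw values): the
// intervals [2,6[ and [4,10[ intersect and the smallest common position is
// 4, whereas [2,4[ and [4,10[ are disjoint because the intervals are
// half-open, so Intersect() returns LifetimePosition::Invalid().
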
// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand)
      : operand_(operand),
        hint_(NULL),
        pos_(pos),
        next_(NULL),
        requires_reg_(false),
        register_beneficial_(true) {
    if (operand_ != NULL && operand_->IsUnallocated()) {
      LUnallocated* unalloc = LUnallocated::cast(operand_);
      requires_reg_ = unalloc->HasRegisterPolicy();
      register_beneficial_ = !unalloc->HasAnyPolicy();
    }
    ASSERT(pos_.IsValid());
  }

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  void set_hint(LOperand* hint) { hint_ = hint; }
  bool HasHint() const { return hint_ != NULL && !hint_->IsUnallocated(); }
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* operand_;
  LOperand* hint_;
  LifetimePosition pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  explicit LiveRange(int id)
      : id_(id),
        spilled_(false),
        assigned_register_(kInvalidAssignment),
        assigned_register_kind_(NONE),
        last_interval_(NULL),
        first_interval_(NULL),
        first_pos_(NULL),
        parent_(NULL),
        next_(NULL),
        current_interval_(NULL),
        last_processed_use_(NULL),
        spill_start_index_(kMaxInt) {
    spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
  }

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  bool IsParent() const { return parent() == NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand();
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, RegisterKind register_kind) {
    ASSERT(!HasRegisterAssigned() && !IsSpilled());
    assigned_register_ = reg;
    assigned_register_kind_ = register_kind;
    ConvertOperands();
  }
  void MakeSpilled() {
    ASSERT(!IsSpilled());
    ASSERT(TopLevel()->HasAllocatedSpillOperand());
    spilled_ = true;
    assigned_register_ = kInvalidAssignment;
    ConvertOperands();
  }

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Returns true if this live range can be spilled at the given position.
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position, which must follow the
  // start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result);

  bool IsDouble() const { return assigned_register_kind_ == DOUBLE_REGISTERS; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }
  UsePosition* FirstPosWithHint() const;

  LOperand* FirstHint() const {
    UsePosition* pos = FirstPosWithHint();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const {
    return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
  }
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand) {
    ASSERT(!operand->IsUnallocated());
    ASSERT(spill_operand_ != NULL);
    ASSERT(spill_operand_->IsUnallocated());
    spill_operand_->ConvertTo(operand->kind(), operand->index());
  }

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end);
  void AddUseInterval(LifetimePosition start, LifetimePosition end);
  UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);
  UsePosition* AddUsePosition(LifetimePosition pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands();
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  int assigned_register_;
  RegisterKind assigned_register_kind_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache; it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  LOperand* spill_operand_;
  int spill_start_index_;
};


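// Illustrative example of splitting (a sketch; the exact bookkeeping lives
// in LiveRange::SplitAt in lithium-allocator.cc): if a range covers [2,20[
// and is split at position 10 into a result range, this range keeps [2,10[
// while the result receives [10,20[ together with the use positions that
// follow the split position; the result becomes a child of the original
// range's top-level range.
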
class GrowableBitVector BASE_EMBEDDED {
 public:
  GrowableBitVector() : bits_(NULL) { }

  bool Contains(int value) const {
    if (!InBitsRange(value)) return false;
    return bits_->Contains(value);
  }

  void Add(int value) {
    EnsureCapacity(value);
    bits_->Add(value);
  }

 private:
  static const int kInitialLength = 1024;

  bool InBitsRange(int value) const {
    return bits_ != NULL && bits_->length() > value;
  }

  void EnsureCapacity(int value) {
    if (InBitsRange(value)) return;
    int new_length = bits_ == NULL ? kInitialLength : bits_->length();
    while (new_length <= value) new_length *= 2;
    BitVector* new_bits = new BitVector(new_length);
    if (bits_ != NULL) new_bits->CopyFrom(*bits_);
    bits_ = new_bits;
  }

  BitVector* bits_;
};


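// Illustrative example of the growth policy above: adding value 5000 to an
// empty GrowableBitVector starts from kInitialLength (1024) and doubles to
// 2048, 4096 and finally 8192 before allocating, since the loop keeps
// doubling while new_length <= value.

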
class LAllocator BASE_EMBEDDED {
 public:
  explicit LAllocator(int first_virtual_register, HGraph* graph)
      : chunk_(NULL),
        summaries_(0),
        next_summary_(NULL),
        summary_stack_(2),
        live_in_sets_(0),
        live_ranges_(16),
        fixed_live_ranges_(8),
        fixed_double_live_ranges_(8),
        unhandled_live_ranges_(8),
        active_live_ranges_(8),
        inactive_live_ranges_(8),
        reusable_slots_(8),
        next_virtual_register_(first_virtual_register),
        first_artificial_register_(first_virtual_register),
        mode_(NONE),
        num_registers_(-1),
        graph_(graph),
        has_osr_entry_(false) {}

  static void Setup();
  static void TraceAlloc(const char* msg, ...);

  // Lithium translation support.
  // Record a use of an input operand in the current instruction.
  void RecordUse(HValue* value, LUnallocated* operand);
  // Record the definition of the output operand.
  void RecordDefinition(HInstruction* instr, LUnallocated* operand);
  // Record a temporary operand.
  void RecordTemporary(LUnallocated* operand);

  // Marks the current instruction as a call.
  void MarkAsCall();

  // Marks the current instruction as requiring the double registers to be
  // saved.
  void MarkAsSaveDoubles();

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  // Begin a new instruction.
  void BeginInstruction();

  // Summarize the current instruction.
  void SummarizeInstruction(int index);

  // Omit the current instruction from the summaries.
  void OmitInstruction();

  // Control max function size.
  static int max_initial_value_ids();

  void Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const ZoneList<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const ZoneList<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process the instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void ProcessOsrEntry();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(InstructionSummary* first,
                              InstructionSummary* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position chosen from the interval
  // [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position start and up to position end.
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);
  void AddGapMove(int pos, LiveRange* prev, LiveRange* next);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  // Return the parallel move that should be used to connect ranges split at
  // the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Current active summary.
  InstructionSummary* current_summary() const { return summary_stack_.last(); }

  // Get the summary for the given instruction index.
  InstructionSummary* GetSummary(int index) const { return summaries_[index]; }

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block) const;

  const char* RegisterName(int allocation_index);

  LChunk* chunk_;
  ZoneList<InstructionSummary*> summaries_;
  InstructionSummary* next_summary_;

  ZoneList<InstructionSummary*> summary_stack_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  ZoneList<LiveRange*> fixed_live_ranges_;
  ZoneList<LiveRange*> fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};

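// Illustrative usage sketch (an assumption about call order based only on
// the public interface above; the real driver lives in the Lithium builder
// and in lithium-allocator.cc):
//
//   LAllocator allocator(first_virtual_register, graph);
//   // While translating each hydrogen instruction into lithium:
//   allocator.BeginInstruction();
//   allocator.RecordDefinition(instr, output_operand);
//   allocator.RecordUse(value, input_operand);
//   allocator.RecordTemporary(temp_operand);
//   allocator.SummarizeInstruction(index);  // or OmitInstruction()
//   // Once the whole chunk has been built:
//   allocator.Allocate(chunk);
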

} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_