// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_LITHIUM_ALLOCATOR_H_
29#define V8_LITHIUM_ALLOCATOR_H_
30
31#include "v8.h"
32
Steve Block9fac8402011-05-12 15:51:54 +010033#include "data-flow.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010034#include "zone.h"
35
36namespace v8 {
37namespace internal {
38
39// Forward declarations.
40class HBasicBlock;
41class HGraph;
42class HInstruction;
43class HPhi;
44class HTracer;
45class HValue;
46class BitVector;
47class StringStream;
48
49class LArgument;
50class LChunk;
51class LConstantOperand;
52class LGap;
53class LInstruction;
54class LParallelMove;
55class LPointerMap;
56class LStackSlot;
57class LRegister;
58
59
60// This class represents a single point of a LOperand's lifetime.
61// For each lithium instruction there are exactly two lifetime positions:
62// the beginning and the end of the instruction. Lifetime positions for
63// different lithium instructions are disjoint.
64class LifetimePosition {
65 public:
66 // Return the lifetime position that corresponds to the beginning of
67 // the instruction with the given index.
68 static LifetimePosition FromInstructionIndex(int index) {
69 return LifetimePosition(index * kStep);
70 }
71
72 // Returns a numeric representation of this lifetime position.
73 int Value() const {
74 return value_;
75 }
76
77 // Returns the index of the instruction to which this lifetime position
78 // corresponds.
79 int InstructionIndex() const {
80 ASSERT(IsValid());
81 return value_ / kStep;
82 }
83
84 // Returns true if this lifetime position corresponds to the instruction
85 // start.
86 bool IsInstructionStart() const {
87 return (value_ & (kStep - 1)) == 0;
88 }
89
90 // Returns the lifetime position for the start of the instruction which
91 // corresponds to this lifetime position.
92 LifetimePosition InstructionStart() const {
93 ASSERT(IsValid());
94 return LifetimePosition(value_ & ~(kStep - 1));
95 }
96
97 // Returns the lifetime position for the end of the instruction which
98 // corresponds to this lifetime position.
99 LifetimePosition InstructionEnd() const {
100 ASSERT(IsValid());
101 return LifetimePosition(InstructionStart().Value() + kStep/2);
102 }
103
104 // Returns the lifetime position for the beginning of the next instruction.
105 LifetimePosition NextInstruction() const {
106 ASSERT(IsValid());
107 return LifetimePosition(InstructionStart().Value() + kStep);
108 }
109
110 // Returns the lifetime position for the beginning of the previous
111 // instruction.
112 LifetimePosition PrevInstruction() const {
113 ASSERT(IsValid());
114 ASSERT(value_ > 1);
115 return LifetimePosition(InstructionStart().Value() - kStep);
116 }
117
118 // Constructs the lifetime position which does not correspond to any
119 // instruction.
120 LifetimePosition() : value_(-1) {}
121
122 // Returns true if this lifetime positions corrensponds to some
123 // instruction.
124 bool IsValid() const { return value_ != -1; }
125
126 static inline LifetimePosition Invalid() { return LifetimePosition(); }
127
128 static inline LifetimePosition MaxPosition() {
129 // We have to use this kind of getter instead of static member due to
130 // crash bug in GDB.
131 return LifetimePosition(kMaxInt);
132 }
133
134 private:
135 static const int kStep = 2;
136
137 // Code relies on kStep being a power of two.
138 STATIC_ASSERT(IS_POWER_OF_TWO(kStep));
139
140 explicit LifetimePosition(int value) : value_(value) { }
141
142 int value_;
143};
144
145
// The class of register a live range should be allocated into.
enum RegisterKind {
  NONE,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};
151
152
// Base class of all lithium operands. An operand packs its kind into the
// low kKindFieldWidth bits of value_ and a signed index into the bits above.
class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  // The cast to int makes the shift arithmetic, so a negative index stored
  // by ConvertTo() round-trips correctly.
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  bool IsStackSlot() const { return kind() == STACK_SLOT; }
  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
  bool IsRegister() const { return kind() == REGISTER; }
  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
  bool IsArgument() const { return kind() == ARGUMENT; }
  bool IsUnallocated() const { return kind() == UNALLOCATED; }
  // Operands are equal iff their packed kind and index are both equal.
  bool Equals(LOperand* other) const { return value_ == other->value_; }
  int VirtualRegister();

  void PrintTo(StringStream* stream);
  // Rewrites this operand in place to the given kind/index pair.
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    // Verify the index survived the shift without losing bits.
    ASSERT(this->index() == index);
  }

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};
195
196
// An operand that has not yet been assigned a location by the register
// allocator. Encodes an allocation policy, a lifetime, a virtual register
// and (for fixed policies) a signed fixed index in the bits of value_.
class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT,
    IGNORE
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at
    // instruction start. Register allocator is free to assign the same register
    // to some other operand used inside instruction (i.e. temporary or
    // output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of
    // instruction. This means that register allocator will not reuse it's
    // register for any other operand inside instruction.
    USED_AT_END
  };

  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // Bit layout (above the superclass's KindField). Some policies have a
  // signed fixed index in the upper bits.
  static const int kPolicyWidth = 4;
  static const int kLifetimeWidth = 1;
  static const int kVirtualRegisterWidth = 17;

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

  // NOTE(review): this is one bit more than the 17-bit VirtualRegisterField
  // can actually encode (1 << kVirtualRegisterWidth) — confirm intended.
  static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
  // 32 - kFixedIndexShift = 7 bits of signed fixed index: [-128, 128).
  static const int kMaxFixedIndices = 128;

  bool HasIgnorePolicy() const { return policy() == IGNORE; }
  bool HasNoPolicy() const { return policy() == NONE; }
  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
    value_ &= ~PolicyField::mask();
    value_ |= PolicyField::encode(policy);
  }
  // Arithmetic shift of the int cast preserves the sign of the fixed index.
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

  unsigned virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
    value_ &= ~VirtualRegisterField::mask();
    value_ |= VirtualRegisterField::encode(id);
  }

  // Returns a fresh ANY-policy operand for the same virtual register.
  LUnallocated* CopyUnconstrained() {
    LUnallocated* result = new LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  // ORs policy, fixed index and lifetime into value_ on top of the kind
  // bits set by the LOperand(UNALLOCATED, 0) base constructor.
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    // Verify the fixed index fit in its 7 bits.
    ASSERT(this->fixed_index() == fixed_index);
  }
};
321
322
323class LMoveOperands BASE_EMBEDDED {
324 public:
325 LMoveOperands(LOperand* from, LOperand* to) : from_(from), to_(to) { }
326
327 LOperand* from() const { return from_; }
328 LOperand* to() const { return to_; }
329 bool IsRedundant() const {
330 return IsEliminated() || from_->Equals(to_) || IsIgnored();
331 }
332 bool IsEliminated() const { return from_ == NULL; }
333 bool IsIgnored() const {
334 if (to_ != NULL && to_->IsUnallocated() &&
335 LUnallocated::cast(to_)->HasIgnorePolicy()) {
336 return true;
337 }
338 return false;
339 }
340
341 void Eliminate() { from_ = to_ = NULL; }
342
343 private:
344 LOperand* from_;
345 LOperand* to_;
346};
347
348
349class LConstantOperand: public LOperand {
350 public:
351 static LConstantOperand* Create(int index) {
352 ASSERT(index >= 0);
353 if (index < kNumCachedOperands) return &cache[index];
354 return new LConstantOperand(index);
355 }
356
357 static LConstantOperand* cast(LOperand* op) {
358 ASSERT(op->IsConstantOperand());
359 return reinterpret_cast<LConstantOperand*>(op);
360 }
361
362 static void SetupCache();
363
364 private:
365 static const int kNumCachedOperands = 128;
366 static LConstantOperand cache[];
367
368 LConstantOperand() : LOperand() { }
369 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
370};
371
372
373class LArgument: public LOperand {
374 public:
375 explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
376
377 static LArgument* cast(LOperand* op) {
378 ASSERT(op->IsArgument());
379 return reinterpret_cast<LArgument*>(op);
380 }
381};
382
383
384class LStackSlot: public LOperand {
385 public:
386 static LStackSlot* Create(int index) {
387 ASSERT(index >= 0);
388 if (index < kNumCachedOperands) return &cache[index];
389 return new LStackSlot(index);
390 }
391
392 static LStackSlot* cast(LOperand* op) {
393 ASSERT(op->IsStackSlot());
394 return reinterpret_cast<LStackSlot*>(op);
395 }
396
397 static void SetupCache();
398
399 private:
400 static const int kNumCachedOperands = 128;
401 static LStackSlot cache[];
402
403 LStackSlot() : LOperand() { }
404 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
405};
406
407
408class LDoubleStackSlot: public LOperand {
409 public:
410 static LDoubleStackSlot* Create(int index) {
411 ASSERT(index >= 0);
412 if (index < kNumCachedOperands) return &cache[index];
413 return new LDoubleStackSlot(index);
414 }
415
416 static LDoubleStackSlot* cast(LOperand* op) {
417 ASSERT(op->IsStackSlot());
418 return reinterpret_cast<LDoubleStackSlot*>(op);
419 }
420
421 static void SetupCache();
422
423 private:
424 static const int kNumCachedOperands = 128;
425 static LDoubleStackSlot cache[];
426
427 LDoubleStackSlot() : LOperand() { }
428 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
429};
430
431
432class LRegister: public LOperand {
433 public:
434 static LRegister* Create(int index) {
435 ASSERT(index >= 0);
436 if (index < kNumCachedOperands) return &cache[index];
437 return new LRegister(index);
438 }
439
440 static LRegister* cast(LOperand* op) {
441 ASSERT(op->IsRegister());
442 return reinterpret_cast<LRegister*>(op);
443 }
444
445 static void SetupCache();
446
447 private:
448 static const int kNumCachedOperands = 16;
449 static LRegister cache[];
450
451 LRegister() : LOperand() { }
452 explicit LRegister(int index) : LOperand(REGISTER, index) { }
453};
454
455
456class LDoubleRegister: public LOperand {
457 public:
458 static LDoubleRegister* Create(int index) {
459 ASSERT(index >= 0);
460 if (index < kNumCachedOperands) return &cache[index];
461 return new LDoubleRegister(index);
462 }
463
464 static LDoubleRegister* cast(LOperand* op) {
465 ASSERT(op->IsDoubleRegister());
466 return reinterpret_cast<LDoubleRegister*>(op);
467 }
468
469 static void SetupCache();
470
471 private:
472 static const int kNumCachedOperands = 16;
473 static LDoubleRegister cache[];
474
475 LDoubleRegister() : LOperand() { }
476 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
477};
478
479
// A register-allocator view of a Lithium instruction. It contains the
// output operand and a list of input and temporary operand uses; inputs
// occupy operands_[0, input_count_) and temps the remainder.
class InstructionSummary: public ZoneObject {
 public:
  InstructionSummary()
      : output_operand_(NULL), input_count_(0), operands_(4), is_call_(false) {}

  // Output operand; may be set at most once.
  LOperand* Output() const { return output_operand_; }
  void SetOutput(LOperand* output) {
    ASSERT(output_operand_ == NULL);
    output_operand_ = output;
  }

  // Input operands.
  int InputCount() const { return input_count_; }
  LOperand* InputAt(int i) const {
    ASSERT(i < input_count_);
    return operands_[i];
  }
  // Inputs are inserted before any temps already in the list so the two
  // groups stay contiguous.
  void AddInput(LOperand* input) {
    operands_.InsertAt(input_count_, input);
    input_count_++;
  }

  // Temporary operands (stored after all inputs).
  int TempCount() const { return operands_.length() - input_count_; }
  LOperand* TempAt(int i) const { return operands_[i + input_count_]; }
  void AddTemp(LOperand* temp) { operands_.Add(temp); }

  // Whether this instruction performs a call (clobbers registers).
  void MarkAsCall() { is_call_ = true; }
  bool IsCall() const { return is_call_; }

 private:
  LOperand* output_operand_;
  int input_count_;
  ZoneList<LOperand*> operands_;
  bool is_call_;
};
519
// Representation of the non-empty interval [start,end[. Intervals are
// chained through next_ to form a live range.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    // Intervals must be non-empty.
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without effecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos);

  // If this interval intersects with other return smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    // Normalize so that *this starts no later than *other, then any
    // overlap begins exactly at other->start().
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  // Half-open containment test: start <= point < end.
  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};
558
// Representation of a use position: one point in a live range where the
// value is read or written, together with the operand used there.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand)
      : operand_(operand),
        hint_(NULL),
        pos_(pos),
        next_(NULL),
        requires_reg_(false),
        register_beneficial_(true) {
    // Derive register requirements from the operand's allocation policy:
    // WRITABLE_REGISTER/MUST_HAVE_REGISTER require a register; an ANY
    // policy means a register brings no benefit.
    if (operand_ != NULL && operand_->IsUnallocated()) {
      LUnallocated* unalloc = LUnallocated::cast(operand_);
      requires_reg_ = unalloc->HasRegisterPolicy();
      register_beneficial_ = !unalloc->HasAnyPolicy();
    }
    ASSERT(pos_.IsValid());
  }

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  // Allocation hint: a preferred location for this use, if any. An
  // unallocated hint does not count as a usable hint.
  LOperand* hint() const { return hint_; }
  void set_hint(LOperand* hint) { hint_ = hint; }
  bool HasHint() const { return hint_ != NULL && !hint_->IsUnallocated(); }
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* operand_;
  LOperand* hint_;
  LifetimePosition pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};
601
// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering. A range may be split; the
// pieces are chained via next_ and all point back at the top-level parent.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  explicit LiveRange(int id)
      : id_(id),
        spilled_(false),
        assigned_register_(kInvalidAssignment),
        assigned_register_kind_(NONE),
        last_interval_(NULL),
        first_interval_(NULL),
        first_pos_(NULL),
        parent_(NULL),
        next_(NULL),
        current_interval_(NULL),
        last_processed_use_(NULL),
        spill_start_index_(kMaxInt) {
    // The spill operand starts out as an IGNORE-policy placeholder and is
    // later converted in place by SetSpillOperand().
    spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
  }

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  bool IsParent() const { return parent() == NULL; }
  int id() const { return id_; }
  // Fixed (precolored) ranges carry negative ids; see FixedLiveRangeID.
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand();
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  // Assigns a register and rewrites all uses accordingly. Must not already
  // have a register or be spilled.
  void set_assigned_register(int reg, RegisterKind register_kind) {
    ASSERT(!HasRegisterAssigned() && !IsSpilled());
    assigned_register_ = reg;
    assigned_register_kind_ = register_kind;
    ConvertOperands();
  }
  // Moves the range to its spill slot; requires the top level to have an
  // allocated spill operand.
  void MakeSpilled() {
    ASSERT(!IsSpilled());
    ASSERT(TopLevel()->HasAllocatedSpillOperand());
    spilled_ = true;
    assigned_register_ = kInvalidAssignment;
    ConvertOperands();
  }

  // Returns use position in this live range that follows both start
  // and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position
  // Modifies internal state of live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position
  // Modifies internal state of live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position which must follow the start of
  // the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result);

  bool IsDouble() const { return assigned_register_kind_ == DOUBLE_REGISTERS; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }
  UsePosition* FirstPosWithHint() const;

  // Hint from the first use position that has one, or NULL.
  LOperand* FirstHint() const {
    UsePosition* pos = FirstPosWithHint();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  // True once the placeholder spill operand has been converted to a real
  // (allocated) location.
  bool HasAllocatedSpillOperand() const {
    return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
  }
  LOperand* GetSpillOperand() const { return spill_operand_; }
  // Converts the placeholder in place so all pieces of a split range see
  // the same spill location through the shared pointer.
  void SetSpillOperand(LOperand* operand) {
    ASSERT(!operand->IsUnallocated());
    ASSERT(spill_operand_ != NULL);
    ASSERT(spill_operand_->IsUnallocated());
    spill_operand_->ConvertTo(operand->kind(), operand->index());
  }

  // Tracks the earliest index at which the range might need to spill.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);


  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end);
  void AddUseInterval(LifetimePosition start, LifetimePosition end);
  UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);
  UsePosition* AddUsePosition(LifetimePosition pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands();
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  int assigned_register_;
  RegisterKind assigned_register_kind_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  LOperand* spill_operand_;
  int spill_start_index_;
};
755
756
Steve Block9fac8402011-05-12 15:51:54 +0100757class GrowableBitVector BASE_EMBEDDED {
758 public:
759 GrowableBitVector() : bits_(NULL) { }
760
761 bool Contains(int value) const {
762 if (!InBitsRange(value)) return false;
763 return bits_->Contains(value);
764 }
765
766 void Add(int value) {
767 EnsureCapacity(value);
768 bits_->Add(value);
769 }
770
771 private:
772 static const int kInitialLength = 1024;
773
774 bool InBitsRange(int value) const {
775 return bits_ != NULL && bits_->length() > value;
776 }
777
778 void EnsureCapacity(int value) {
779 if (InBitsRange(value)) return;
780 int new_length = bits_ == NULL ? kInitialLength : bits_->length();
781 while (new_length <= value) new_length *= 2;
782 BitVector* new_bits = new BitVector(new_length);
783 if (bits_ != NULL) new_bits->CopyFrom(*bits_);
784 bits_ = new_bits;
785 }
786
787 BitVector* bits_;
788};
789
790
// Register allocator for lithium code. Builds live ranges from the hydrogen
// graph and assigns registers/spill slots, working through unhandled,
// active and inactive range lists (one pass per RegisterKind).
class LAllocator BASE_EMBEDDED {
 public:
  explicit LAllocator(int first_virtual_register, HGraph* graph)
      : chunk_(NULL),
        summaries_(0),
        next_summary_(NULL),
        summary_stack_(2),
        live_in_sets_(0),
        live_ranges_(16),
        fixed_live_ranges_(8),
        fixed_double_live_ranges_(8),
        unhandled_live_ranges_(8),
        active_live_ranges_(8),
        inactive_live_ranges_(8),
        reusable_slots_(8),
        next_virtual_register_(first_virtual_register),
        first_artificial_register_(first_virtual_register),
        mode_(NONE),
        num_registers_(-1),
        graph_(graph),
        has_osr_entry_(false) {}

  static void Setup();
  static void TraceAlloc(const char* msg, ...);

  // Lithium translation support.
  // Record a use of an input operand in the current instruction.
  void RecordUse(HValue* value, LUnallocated* operand);
  // Record the definition of the output operand.
  void RecordDefinition(HInstruction* instr, LUnallocated* operand);
  // Record a temporary operand.
  void RecordTemporary(LUnallocated* operand);

  // Marks the current instruction as a call.
  void MarkAsCall();

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  // Begin a new instruction.
  void BeginInstruction();

  // Summarize the current instruction.
  void SummarizeInstruction(int index);

  // Summarize the current instruction.
  void OmitInstruction();

  // Control max function size.
  static int max_initial_value_ids();

  // Entry point: allocates registers for the whole chunk.
  void Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const ZoneList<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const ZoneList<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

 private:
  // Main allocation phases, in the order Allocate() runs them.
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void ProcessOsrEntry();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(InstructionSummary* first,
                              InstructionSummary* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the life range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitAt(LiveRange* range, LifetimePosition pos);

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given life range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given life range after position start and up to position end.
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);
  void AddGapMove(int pos, LiveRange* prev, LiveRange* next);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  // Return parallel move that should be used to connect ranges split at the
  // given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains give lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Current active summary.
  InstructionSummary* current_summary() const { return summary_stack_.last(); }

  // Get summary for given instruction index.
  InstructionSummary* GetSummary(int index) const { return summaries_[index]; }

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  // Fixed ranges get negative ids so IsFixed() can distinguish them.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block) const;

  const char* RegisterName(int allocation_index);

  LChunk* chunk_;
  ZoneList<InstructionSummary*> summaries_;
  InstructionSummary* next_summary_;

  ZoneList<InstructionSummary*> summary_stack_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges
  ZoneList<LiveRange*> fixed_live_ranges_;
  ZoneList<LiveRange*> fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  // Register kind currently being allocated (set per allocation pass).
  RegisterKind mode_;
  int num_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};
1022
1023
1024} } // namespace v8::internal
1025
1026#endif // V8_LITHIUM_ALLOCATOR_H_