blob: c67d60ed6efe5321ca4ee776bd1667dc7f940a69 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_REGISTER_ALLOCATOR_H_
6#define V8_REGISTER_ALLOCATOR_H_
7
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/instruction.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/ostreams.h"
10#include "src/register-configuration.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/zone-containers.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012
13namespace v8 {
14namespace internal {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000015namespace compiler {
16
// The two register classes the allocator treats separately: general-purpose
// and floating-point registers.
enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
Ben Murdochb8a8cc12014-11-26 15:28:44 +000018
19// This class represents a single point of a InstructionOperand's lifetime. For
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000020// each instruction there are four lifetime positions:
21//
22// [[START, END], [START, END]]
23//
24// Where the first half position corresponds to
25//
26// [GapPosition::START, GapPosition::END]
27//
28// and the second half position corresponds to
29//
30// [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
31//
32class LifetimePosition final {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000033 public:
34 // Return the lifetime position that corresponds to the beginning of
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000035 // the gap with the given index.
36 static LifetimePosition GapFromInstructionIndex(int index) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000037 return LifetimePosition(index * kStep);
38 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000039 // Return the lifetime position that corresponds to the beginning of
40 // the instruction with the given index.
41 static LifetimePosition InstructionFromInstructionIndex(int index) {
42 return LifetimePosition(index * kStep + kHalfStep);
43 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000044
Ben Murdochc5610432016-08-08 18:44:38 +010045 static bool ExistsGapPositionBetween(LifetimePosition pos1,
46 LifetimePosition pos2) {
47 if (pos1 > pos2) std::swap(pos1, pos2);
48 LifetimePosition next(pos1.value_ + 1);
49 if (next.IsGapPosition()) return next < pos2;
50 return next.NextFullStart() < pos2;
51 }
52
Ben Murdochb8a8cc12014-11-26 15:28:44 +000053 // Returns a numeric representation of this lifetime position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000054 int value() const { return value_; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000055
56 // Returns the index of the instruction to which this lifetime position
57 // corresponds.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000058 int ToInstructionIndex() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000059 DCHECK(IsValid());
60 return value_ / kStep;
61 }
62
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000063 // Returns true if this lifetime position corresponds to a START value
64 bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
65 // Returns true if this lifetime position corresponds to an END value
66 bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
67 // Returns true if this lifetime position corresponds to a gap START value
68 bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000069
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000070 bool IsGapPosition() const { return (value_ & 0x2) == 0; }
71 bool IsInstructionPosition() const { return !IsGapPosition(); }
72
73 // Returns the lifetime position for the current START.
74 LifetimePosition Start() const {
75 DCHECK(IsValid());
76 return LifetimePosition(value_ & ~(kHalfStep - 1));
77 }
78
79 // Returns the lifetime position for the current gap START.
80 LifetimePosition FullStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000081 DCHECK(IsValid());
82 return LifetimePosition(value_ & ~(kStep - 1));
83 }
84
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000085 // Returns the lifetime position for the current END.
86 LifetimePosition End() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000087 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000088 return LifetimePosition(Start().value_ + kHalfStep / 2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000089 }
90
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000091 // Returns the lifetime position for the beginning of the next START.
92 LifetimePosition NextStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000093 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000094 return LifetimePosition(Start().value_ + kHalfStep);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000095 }
96
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000097 // Returns the lifetime position for the beginning of the next gap START.
98 LifetimePosition NextFullStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000099 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000100 return LifetimePosition(FullStart().value_ + kStep);
101 }
102
103 // Returns the lifetime position for the beginning of the previous START.
104 LifetimePosition PrevStart() const {
105 DCHECK(IsValid());
106 DCHECK(value_ >= kHalfStep);
107 return LifetimePosition(Start().value_ - kHalfStep);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000108 }
109
110 // Constructs the lifetime position which does not correspond to any
111 // instruction.
112 LifetimePosition() : value_(-1) {}
113
114 // Returns true if this lifetime positions corrensponds to some
115 // instruction.
116 bool IsValid() const { return value_ != -1; }
117
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000118 bool operator<(const LifetimePosition& that) const {
119 return this->value_ < that.value_;
120 }
121
122 bool operator<=(const LifetimePosition& that) const {
123 return this->value_ <= that.value_;
124 }
125
126 bool operator==(const LifetimePosition& that) const {
127 return this->value_ == that.value_;
128 }
129
130 bool operator!=(const LifetimePosition& that) const {
131 return this->value_ != that.value_;
132 }
133
134 bool operator>(const LifetimePosition& that) const {
135 return this->value_ > that.value_;
136 }
137
138 bool operator>=(const LifetimePosition& that) const {
139 return this->value_ >= that.value_;
140 }
141
142 void Print() const;
143
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000144 static inline LifetimePosition Invalid() { return LifetimePosition(); }
145
146 static inline LifetimePosition MaxPosition() {
147 // We have to use this kind of getter instead of static member due to
148 // crash bug in GDB.
149 return LifetimePosition(kMaxInt);
150 }
151
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000152 static inline LifetimePosition FromInt(int value) {
153 return LifetimePosition(value);
154 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000155
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000156 private:
157 static const int kHalfStep = 2;
158 static const int kStep = 2 * kHalfStep;
159
160 // Code relies on kStep and kHalfStep being a power of two.
161 STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000162
163 explicit LifetimePosition(int value) : value_(value) {}
164
165 int value_;
166};
167
168
// Debug-printing support for LifetimePosition.
std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
170
171
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000172// Representation of the non-empty interval [start,end[.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000173class UseInterval final : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000174 public:
175 UseInterval(LifetimePosition start, LifetimePosition end)
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400176 : start_(start), end_(end), next_(nullptr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000177 DCHECK(start < end);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000178 }
179
180 LifetimePosition start() const { return start_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000181 void set_start(LifetimePosition start) { start_ = start; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000182 LifetimePosition end() const { return end_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000183 void set_end(LifetimePosition end) { end_ = end; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000184 UseInterval* next() const { return next_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000185 void set_next(UseInterval* next) { next_ = next; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000186
187 // Split this interval at the given position without effecting the
188 // live range that owns it. The interval must contain the position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000189 UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000190
191 // If this interval intersects with other return smallest position
192 // that belongs to both of them.
193 LifetimePosition Intersect(const UseInterval* other) const {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000194 if (other->start() < start_) return other->Intersect(this);
195 if (other->start() < end_) return other->start();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000196 return LifetimePosition::Invalid();
197 }
198
199 bool Contains(LifetimePosition point) const {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000200 return start_ <= point && point < end_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000201 }
202
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000203 // Returns the index of the first gap covered by this interval.
204 int FirstGapIndex() const {
205 int ret = start_.ToInstructionIndex();
206 if (start_.IsInstructionPosition()) {
207 ++ret;
208 }
209 return ret;
210 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000211
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000212 // Returns the index of the last gap covered by this interval.
213 int LastGapIndex() const {
214 int ret = end_.ToInstructionIndex();
215 if (end_.IsGapPosition() && end_.IsStart()) {
216 --ret;
217 }
218 return ret;
219 }
220
221 private:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000222 LifetimePosition start_;
223 LifetimePosition end_;
224 UseInterval* next_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400225
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400226 DISALLOW_COPY_AND_ASSIGN(UseInterval);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000227};
228
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400229
// Constraint a use position places on where the value must live: anywhere,
// in a register, or in a stack slot.
enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };
231
232
// Discriminates what a UsePosition's hint_ pointer refers to (if anything);
// see UsePosition::ResolveHint / HintTypeForOperand.
enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};
240
241
// Sentinel register code meaning "no register assigned". Chosen one past the
// largest valid general-register code so it can never collide with a real
// assignment.
static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

// The sentinel must also be out of range for FP register codes.
static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");
247
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000248// Representation of a use position.
// Representation of a use position: one point in a live range where the
// value is read, together with its operand constraint and register hint.
class UsePosition final : public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  // Whether placing the value in a register at this use would help, even if
  // a register is not strictly required.
  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  // Intrusive singly-linked list of use positions within a live range.
  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  // flags_ layout. The bit positions are load-bearing; do not reorder.
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  // Interpretation depends on hint_type(); resolved via ResolveHint().
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
298
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000299
// Forward declarations for types referenced by LiveRange and friends.
class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400304
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000305// Representation of SSA values' live ranges as a collection of (continuous)
306// intervals over the instruction ordering.
// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering. A LiveRange is one segment of a
// virtual register's lifetime; segments form a chain via next(), rooted at a
// TopLevelLiveRange.
class LiveRange : public ZoneObject {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  // Next child in the split chain, or nullptr for the last segment.
  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns use position in this live range that follows both start
  // and last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns use position for which register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  // Start/End are only meaningful on non-empty ranges.
  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  // Used solely by the Greedy Allocator:
  unsigned GetSize();
  float weight() const { return weight_; }
  void set_weight(float weight) { weight_ = weight; }
  LiveRangeGroup* group() const { return group_; }
  void set_group(LiveRangeGroup* group) { group_ = group; }
  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  static const int kInvalidSize = -1;
  static const float kInvalidWeight;
  static const float kMaxWeight;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  // bits_ layout. AssignedRegisterField must agree with
  // UsePosition::AssignedRegisterField; do not reorder.
  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;
  // greedy: the number of LifetimePositions covered by this range. Used to
  // prioritize selecting live ranges for register assignment, as well as
  // in weight calculations.
  int size_;

  // greedy: a metric for resolving conflicts between ranges with an assigned
  // register and ranges that intersect them and need a register.
  float weight_;

  // greedy: grouping of ranges that should share a register.
  LiveRangeGroup* group_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
478
479
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000480class LiveRangeGroup final : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000481 public:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000482 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
483 ZoneVector<LiveRange*>& ranges() { return ranges_; }
484 const ZoneVector<LiveRange*>& ranges() const { return ranges_; }
485
486 // TODO(mtrofin): populate assigned register and use in weight calculation.
487 int assigned_register() const { return assigned_register_; }
488 void set_assigned_register(int reg) { assigned_register_ = reg; }
489
490 private:
491 ZoneVector<LiveRange*> ranges_;
492 int assigned_register_;
493 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
494};
495
496
// The root segment of a virtual register's live range. Owns the state shared
// by all children: spill type/location, phi-ness, splinter bookkeeping, and
// child id allocation.
class TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  // Fixed (precolored) ranges are encoded with a negative virtual register.
  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  // Only valid when spill_type() == kSpillOperand (union member selection).
  InstructionOperand* GetSpillOperand() const {
    DCHECK(spill_type() == SpillType::kSpillOperand);
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK(spill_type() != SpillType::kSpillOperand);
    return spill_range_;
  }

  // Only valid when spill_type() == kSpillRange (union member selection).
  SpillRange* GetSpillRange() const {
    DCHECK(spill_type() == SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  // Keeps the minimum of all recorded spill start indices.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    // Switches the second union from the insertion list to the block set.
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  // Child ids are allocated by the splinter root so they stay unique across
  // the whole splinter family.
  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  // Only valid before TreatAsSpilledInDeferredBlock() (union member
  // selection).
  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  // bits_ layout continues from LiveRange (bit 0 is LiveRange::SpilledField);
  // do not reorder.
  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  // Active member determined by IsSpilledOnlyInDeferredBlocks().
  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
685
686
// Bundles a live range with the register configuration needed to resolve
// register names when printing.
struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


// Debug-printing support for live ranges.
std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);
695
696
// A stack-slot "live range": the merged use intervals of one or more
// top-level live ranges that can share a single spill slot.
class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }
  // Currently, only 4 or 8 byte slots are supported.
  int ByteWidth() const;
  bool IsEmpty() const { return live_ranges_.empty(); }
  // Tries to fold other into this range; presumably only legal when the
  // intervals do not intersect (see IsIntersectingWith).
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  // A slot may be assigned exactly once.
  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  int byte_width() const { return byte_width_; }
  RegisterKind kind() const { return kind_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
  RegisterKind kind_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000740
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400741
// Shared state for all phases of register allocation: live ranges, liveness
// bit sets, spill ranges, phi bookkeeping, and the zones/frame/code they are
// built against. Each phase class below holds a pointer to one instance.
class RegisterAllocationData final : public ZoneObject {
 public:
  // Per-phi bookkeeping: the phi instruction, its block, the operands feeding
  // it, and (for hinting) the register eventually assigned to it.
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    // May only be called while unassigned (DCHECKed).
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  // Keyed by virtual register — TODO confirm against InitializePhiMap in .cc.
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  // A reference-map entry whose operand is filled in after allocation.
  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  // Returns the live range for the given virtual register, creating it first
  // if necessary.
  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  // Inserts a move in the gap of instruction |index| and returns it so the
  // caller can set/patch its operands.
  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  // True if the range's virtual register holds a tagged (GC-traced) value.
  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  // Validation predicates used by the pipeline (names suggest DCHECK-style
  // consistency checks; see .cc for exact semantics).
  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(RegisterKind kind, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  // Indexed by virtual register; fixed ranges model preassigned machine
  // registers (general vs. double kept separately).
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};
870
871
// Phases 1-2 of the pipeline: materialize fixed-register constraints as gap
// moves and deconstruct SSA form.
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1 : insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  // Per-block / per-instruction workers for MeetRegisterConstraints.
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
901
902
// Phase 3 of the pipeline: backward liveness analysis that turns the
// instruction stream into live ranges with use positions and hints.
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  // Fixed (machine) registers get negative range ids, disjoint from the
  // non-negative virtual register ids.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedDoubleLiveRangeID(int index);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedDoubleLiveRangeFor(int index);

  // Record/resolve register hints flowing through phi inputs.
  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  // Convenience overload: use position with no operand and no hint.
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  // Maps a phi input operand to the use position awaiting its hint.
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
968
969
// Base class for register allocation strategies. Holds the register-kind
// (general vs. floating point) view of the configuration and the common
// split/spill utilities shared by concrete allocators.
class RegisterAllocator : public ZoneObject {
 public:
  explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  // Maps a dense allocatable index to the machine register code.
  int allocatable_register_code(int allocatable_index) const {
    return allocatable_register_codes_[allocatable_index];
  }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  // A range is processed by this allocator only if it is non-empty and of
  // the allocator's register kind.
  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }


  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside the loop try to
  // hoist spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  // Fixed ranges of this allocator's kind (general or double).
  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};
1036
1037
// Phase 4 of the pipeline: classic linear-scan register allocation over the
// live ranges, maintaining the unhandled/active/inactive worklists.
class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position [end].
  // Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  // Debug-only watermark; presumably checks allocation proceeds
  // monotonically in position order — TODO confirm against the .cc.
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
1098
1099
// Pipeline step that determines where spill slots are needed/placed
// (runs between allocation and spill slot assignment).
class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};
1113
1114
// Phases 5-6 of the pipeline: pick concrete spill slots and write the final
// operand assignments back into the instruction stream.
class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};
1132
1133
// Phase 7 of the pipeline: record, for each safe point, where tagged values
// live so the GC can find them.
class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};
1150
1151
class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  // Resolves one block/predecessor edge; return value semantics are defined
  // in the .cc (appears to count inserted moves — TODO confirm).
  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
1193
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001194} // namespace compiler
1195} // namespace internal
1196} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001197
1198#endif // V8_REGISTER_ALLOCATOR_H_