// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_REGISTER_ALLOCATOR_H_
#define V8_REGISTER_ALLOCATOR_H_

#include "src/compiler/instruction.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind {
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
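// A worked example (illustrative; the values follow from kHalfStep == 2 and
// kStep == 4 defined below):
//
//   LifetimePosition gap = LifetimePosition::GapFromInstructionIndex(3);
//   gap.value();                  // 12: START of the gap of instruction 3
//   gap.IsGapPosition();          // true
//   gap.End().value();            // 13: END of that gap
//
//   LifetimePosition ins =
//       LifetimePosition::InstructionFromInstructionIndex(3);
//   ins.value();                  // 14: START of instruction 3
//   ins.IsInstructionPosition();  // true
//   ins.End().value();            // 15: END of instruction 3
//   ins.ToInstructionIndex();     // 3, as is gap.ToInstructionIndex()
//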
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK(value_ >= kHalfStep);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to a
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  // Code relies on kStep and kHalfStep being powers of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty interval [start,end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with the other interval, returns the smallest
  // position that belongs to both of them.
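  // For example (illustrative): for [4, 12) and [8, 20) this returns
  // position 8, the smallest position common to both; for [4, 8) and
  // [8, 20) the intervals are disjoint (they are half-open), so this
  // returns LifetimePosition::Invalid().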
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }
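
  // For example (illustrative, with kStep == 4): the interval
  // [InstructionFromInstructionIndex(2), GapFromInstructionIndex(4)), i.e.
  // positions [10, 16), covers exactly one gap, the one belonging to
  // instruction 3, so FirstGapIndex() and LastGapIndex() both return 3.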

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};


enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };


enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;


static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxDoubleRegisters,
              "kUnassignedRegister too small");


// Representation of a use position.
class UsePosition final : public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
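
  // A minimal hinting sketch (hypothetical 'use' pointer; only the calls
  // shown are part of this class):
  //
  //   int reg;
  //   if (use->HintRegister(&reg)) {
  //     // The hint resolved to a register; prefer 'reg' when allocating.
  //   }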

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange : public ZoneObject {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);
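
  // A minimal splitting sketch (hypothetical 'range' and 'zone'):
  //
  //   LifetimePosition pos = LifetimePosition::GapFromInstructionIndex(7);
  //   LiveRange* child = range->SplitAt(pos, zone);
  //   // 'range' now ends at pos, 'child' starts at pos, and the two stay
  //   // linked through range->next().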

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  // Used solely by the Greedy Allocator:
  unsigned GetSize();
  float weight() const { return weight_; }
  void set_weight(float weight) { weight_ = weight; }
  LiveRangeGroup* group() const { return group_; }
  void set_group(LiveRangeGroup* group) { group_ = group; }
  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  static const int kInvalidSize = -1;
  static const float kInvalidWeight;
  static const float kMaxWeight;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;
  // greedy: the number of LifetimePositions covered by this range. Used to
  // prioritize selecting live ranges for register assignment, as well as
  // in weight calculations.
  int size_;

  // greedy: a metric for resolving conflicts between ranges with an assigned
  // register and ranges that intersect them and need a register.
  float weight_;

  // greedy: grouping.
  LiveRangeGroup* group_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};


class LiveRangeGroup final : public ZoneObject {
 public:
  explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
  ZoneVector<LiveRange*>& ranges() { return ranges_; }
  const ZoneVector<LiveRange*>& ranges() const { return ranges_; }

  // TODO(mtrofin): populate assigned register and use in weight calculation.
  int assigned_register() const { return assigned_register_; }
  void set_assigned_register(int reg) { assigned_register_ = reg; }

 private:
  ZoneVector<LiveRange*> ranges_;
  int assigned_register_;
  DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
};


class TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);
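
  // A splintering sketch (hypothetical 'range', 'zone' and positions): a
  // subrange is carved out, processed independently, and folded back later:
  //
  //   range->Splinter(start, end, zone);
  //   TopLevelLiveRange* splinter = range->splinter();
  //   // ... allocate or spill 'splinter' separately ...
  //   range->Merge(splinter, zone);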

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK(spill_type() == SpillType::kSpillOperand);
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK(spill_type() != SpillType::kSpillOperand);
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK(spill_type() == SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // any non-spilled child with a use position requiring a slot is contained
  // in a deferred block, mark the range as IsSpilledOnlyInDeferredBlocks, so
  // that we avoid spilling at definition and instead let the
  // LiveRangeConnector perform the spills within the deferred blocks. If so,
  // we insert spills here for non-spilled ranges with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};


struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);
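
// A printing sketch (hypothetical 'config', 'range' and std::ostream 'os'):
//
//   PrintableLiveRange printable = {config, range};
//   os << printable;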


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }
  // Currently, only 4- or 8-byte slots are supported.
  int ByteWidth() const;
  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  int byte_width() const { return byte_width_; }
  RegisterKind kind() const { return kind_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
  RegisterKind kind_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};


class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(RegisterKind kind, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<int> allocatable_codes_;
  ZoneVector<int> allocatable_double_codes_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};


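// The classes below implement the allocation pipeline; the numbered phases in
// their comments run in order. A minimal driver sketch (hypothetical 'data'
// and 'temp_zone'; the production driver lives in the compiler pipeline, not
// in this header):
//
//   ConstraintBuilder constraints(data);
//   constraints.MeetRegisterConstraints();                // phase 1
//   constraints.ResolvePhis();                            // phase 2
//   LiveRangeBuilder(data, temp_zone).BuildLiveRanges();  // phase 3
//   LinearScanAllocator(data, GENERAL_REGISTERS, temp_zone)
//       .AllocateRegisters();                             // phase 4, per kind
//   OperandAssigner assigner(data);
//   assigner.AssignSpillSlots();                          // phase 5
//   assigner.CommitAssignment();                          // phase 6
//   ReferenceMapPopulator(data).PopulateReferenceMaps();  // phase 7
//   LiveRangeConnector connector(data);
//   connector.ConnectRanges(temp_zone);                   // phase 8
//   connector.ResolveControlFlow(temp_zone);              // phase 9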
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};


class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedDoubleLiveRangeID(int index);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedDoubleLiveRangeFor(int index);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};


class RegisterAllocator : public ZoneObject {
 public:
  explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  int allocatable_register_code(int allocatable_index) const {
    return allocatable_register_codes_[allocatable_index];
  }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_REGISTER_ALLOCATOR_H_