blob: caadcba18b31759d92d30e0a00ef383490f76387 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_REGISTER_ALLOCATOR_H_
6#define V8_REGISTER_ALLOCATOR_H_
7
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/instruction.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/ostreams.h"
10#include "src/register-configuration.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/zone-containers.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012
13namespace v8 {
14namespace internal {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000015namespace compiler {
16
// The two register classes the allocator tracks separately: general-purpose
// registers and floating-point registers.
enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
Ben Murdochb8a8cc12014-11-26 15:28:44 +000018
19// This class represents a single point of a InstructionOperand's lifetime. For
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000020// each instruction there are four lifetime positions:
21//
22// [[START, END], [START, END]]
23//
24// Where the first half position corresponds to
25//
26// [GapPosition::START, GapPosition::END]
27//
28// and the second half position corresponds to
29//
30// [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
31//
32class LifetimePosition final {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000033 public:
34 // Return the lifetime position that corresponds to the beginning of
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000035 // the gap with the given index.
36 static LifetimePosition GapFromInstructionIndex(int index) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000037 return LifetimePosition(index * kStep);
38 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000039 // Return the lifetime position that corresponds to the beginning of
40 // the instruction with the given index.
41 static LifetimePosition InstructionFromInstructionIndex(int index) {
42 return LifetimePosition(index * kStep + kHalfStep);
43 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000044
Ben Murdochc5610432016-08-08 18:44:38 +010045 static bool ExistsGapPositionBetween(LifetimePosition pos1,
46 LifetimePosition pos2) {
47 if (pos1 > pos2) std::swap(pos1, pos2);
48 LifetimePosition next(pos1.value_ + 1);
49 if (next.IsGapPosition()) return next < pos2;
50 return next.NextFullStart() < pos2;
51 }
52
Ben Murdochb8a8cc12014-11-26 15:28:44 +000053 // Returns a numeric representation of this lifetime position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000054 int value() const { return value_; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000055
56 // Returns the index of the instruction to which this lifetime position
57 // corresponds.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000058 int ToInstructionIndex() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000059 DCHECK(IsValid());
60 return value_ / kStep;
61 }
62
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000063 // Returns true if this lifetime position corresponds to a START value
64 bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
65 // Returns true if this lifetime position corresponds to an END value
66 bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
67 // Returns true if this lifetime position corresponds to a gap START value
68 bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000069
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000070 bool IsGapPosition() const { return (value_ & 0x2) == 0; }
71 bool IsInstructionPosition() const { return !IsGapPosition(); }
72
73 // Returns the lifetime position for the current START.
74 LifetimePosition Start() const {
75 DCHECK(IsValid());
76 return LifetimePosition(value_ & ~(kHalfStep - 1));
77 }
78
79 // Returns the lifetime position for the current gap START.
80 LifetimePosition FullStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000081 DCHECK(IsValid());
82 return LifetimePosition(value_ & ~(kStep - 1));
83 }
84
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000085 // Returns the lifetime position for the current END.
86 LifetimePosition End() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000087 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000088 return LifetimePosition(Start().value_ + kHalfStep / 2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000089 }
90
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000091 // Returns the lifetime position for the beginning of the next START.
92 LifetimePosition NextStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000093 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000094 return LifetimePosition(Start().value_ + kHalfStep);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000095 }
96
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000097 // Returns the lifetime position for the beginning of the next gap START.
98 LifetimePosition NextFullStart() const {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000099 DCHECK(IsValid());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000100 return LifetimePosition(FullStart().value_ + kStep);
101 }
102
103 // Returns the lifetime position for the beginning of the previous START.
104 LifetimePosition PrevStart() const {
105 DCHECK(IsValid());
106 DCHECK(value_ >= kHalfStep);
107 return LifetimePosition(Start().value_ - kHalfStep);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000108 }
109
110 // Constructs the lifetime position which does not correspond to any
111 // instruction.
112 LifetimePosition() : value_(-1) {}
113
114 // Returns true if this lifetime positions corrensponds to some
115 // instruction.
116 bool IsValid() const { return value_ != -1; }
117
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000118 bool operator<(const LifetimePosition& that) const {
119 return this->value_ < that.value_;
120 }
121
122 bool operator<=(const LifetimePosition& that) const {
123 return this->value_ <= that.value_;
124 }
125
126 bool operator==(const LifetimePosition& that) const {
127 return this->value_ == that.value_;
128 }
129
130 bool operator!=(const LifetimePosition& that) const {
131 return this->value_ != that.value_;
132 }
133
134 bool operator>(const LifetimePosition& that) const {
135 return this->value_ > that.value_;
136 }
137
138 bool operator>=(const LifetimePosition& that) const {
139 return this->value_ >= that.value_;
140 }
141
142 void Print() const;
143
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000144 static inline LifetimePosition Invalid() { return LifetimePosition(); }
145
146 static inline LifetimePosition MaxPosition() {
147 // We have to use this kind of getter instead of static member due to
148 // crash bug in GDB.
149 return LifetimePosition(kMaxInt);
150 }
151
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000152 static inline LifetimePosition FromInt(int value) {
153 return LifetimePosition(value);
154 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000155
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000156 private:
157 static const int kHalfStep = 2;
158 static const int kStep = 2 * kHalfStep;
159
160 // Code relies on kStep and kHalfStep being a power of two.
161 STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000162
163 explicit LifetimePosition(int value) : value_(value) {}
164
165 int value_;
166};
167
168
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000169std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
170
171
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000172// Representation of the non-empty interval [start,end[.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000173class UseInterval final : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000174 public:
175 UseInterval(LifetimePosition start, LifetimePosition end)
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400176 : start_(start), end_(end), next_(nullptr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000177 DCHECK(start < end);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000178 }
179
180 LifetimePosition start() const { return start_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000181 void set_start(LifetimePosition start) { start_ = start; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000182 LifetimePosition end() const { return end_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000183 void set_end(LifetimePosition end) { end_ = end; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000184 UseInterval* next() const { return next_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000185 void set_next(UseInterval* next) { next_ = next; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000186
187 // Split this interval at the given position without effecting the
188 // live range that owns it. The interval must contain the position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000189 UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000190
191 // If this interval intersects with other return smallest position
192 // that belongs to both of them.
193 LifetimePosition Intersect(const UseInterval* other) const {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000194 if (other->start() < start_) return other->Intersect(this);
195 if (other->start() < end_) return other->start();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000196 return LifetimePosition::Invalid();
197 }
198
199 bool Contains(LifetimePosition point) const {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000200 return start_ <= point && point < end_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000201 }
202
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000203 // Returns the index of the first gap covered by this interval.
204 int FirstGapIndex() const {
205 int ret = start_.ToInstructionIndex();
206 if (start_.IsInstructionPosition()) {
207 ++ret;
208 }
209 return ret;
210 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000211
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000212 // Returns the index of the last gap covered by this interval.
213 int LastGapIndex() const {
214 int ret = end_.ToInstructionIndex();
215 if (end_.IsGapPosition() && end_.IsStart()) {
216 --ret;
217 }
218 return ret;
219 }
220
221 private:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000222 LifetimePosition start_;
223 LifetimePosition end_;
224 UseInterval* next_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400225
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400226 DISALLOW_COPY_AND_ASSIGN(UseInterval);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000227};
228
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400229
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000230enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };
231
232
233enum class UsePositionHintType : uint8_t {
234 kNone,
235 kOperand,
236 kUsePos,
237 kPhi,
238 kUnresolved
239};
240
241
242static const int32_t kUnassignedRegister =
243 RegisterConfiguration::kMaxGeneralRegisters;
244
Ben Murdochc5610432016-08-08 18:44:38 +0100245static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000246 "kUnassignedRegister too small");
247
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000248// Representation of a use position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000249class UsePosition final : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000250 public:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000251 UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
252 UsePositionHintType hint_type);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000253
254 InstructionOperand* operand() const { return operand_; }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400255 bool HasOperand() const { return operand_ != nullptr; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000256
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000257 bool RegisterIsBeneficial() const {
258 return RegisterBeneficialField::decode(flags_);
259 }
260 UsePositionType type() const { return TypeField::decode(flags_); }
261 void set_type(UsePositionType type, bool register_beneficial);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000262
263 LifetimePosition pos() const { return pos_; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000264
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000265 UsePosition* next() const { return next_; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000266 void set_next(UsePosition* next) { next_ = next; }
267
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000268 // For hinting only.
269 void set_assigned_register(int register_code) {
270 flags_ = AssignedRegisterField::update(flags_, register_code);
271 }
272
273 UsePositionHintType hint_type() const {
274 return HintTypeField::decode(flags_);
275 }
276 bool HasHint() const;
277 bool HintRegister(int* register_code) const;
278 void ResolveHint(UsePosition* use_pos);
279 bool IsResolved() const {
280 return hint_type() != UsePositionHintType::kUnresolved;
281 }
282 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400283
284 private:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000285 typedef BitField<UsePositionType, 0, 2> TypeField;
286 typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
287 typedef BitField<bool, 5, 1> RegisterBeneficialField;
288 typedef BitField<int32_t, 6, 6> AssignedRegisterField;
289
290 InstructionOperand* const operand_;
291 void* hint_;
292 UsePosition* next_;
293 LifetimePosition const pos_;
294 uint32_t flags_;
295
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400296 DISALLOW_COPY_AND_ASSIGN(UsePosition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000297};
298
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000299
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400300class SpillRange;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000301class RegisterAllocationData;
302class TopLevelLiveRange;
303class LiveRangeGroup;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400304
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000305// Representation of SSA values' live ranges as a collection of (continuous)
306// intervals over the instruction ordering.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000307class LiveRange : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000308 public:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000309 UseInterval* first_interval() const { return first_interval_; }
310 UsePosition* first_pos() const { return first_pos_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000311 TopLevelLiveRange* TopLevel() { return top_level_; }
312 const TopLevelLiveRange* TopLevel() const { return top_level_; }
313
314 bool IsTopLevel() const;
315
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000316 LiveRange* next() const { return next_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000317
318 int relative_id() const { return relative_id_; }
319
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400320 bool IsEmpty() const { return first_interval() == nullptr; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000321
322 InstructionOperand GetAssignedOperand() const;
323
324 MachineRepresentation representation() const {
325 return RepresentationField::decode(bits_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000326 }
327
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000328 int assigned_register() const { return AssignedRegisterField::decode(bits_); }
329 bool HasRegisterAssigned() const {
330 return assigned_register() != kUnassignedRegister;
331 }
332 void set_assigned_register(int reg);
333 void UnsetAssignedRegister();
334
335 bool spilled() const { return SpilledField::decode(bits_); }
336 void Spill();
337
338 RegisterKind kind() const;
339
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000340 // Returns use position in this live range that follows both start
341 // and last processed use position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000342 UsePosition* NextUsePosition(LifetimePosition start) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000343
344 // Returns use position for which register is required in this live
345 // range and which follows both start and last processed use position
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000346 UsePosition* NextRegisterPosition(LifetimePosition start) const;
347
348 // Returns the first use position requiring stack slot, or nullptr.
349 UsePosition* NextSlotPosition(LifetimePosition start) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000350
351 // Returns use position for which register is beneficial in this live
352 // range and which follows both start and last processed use position
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000353 UsePosition* NextUsePositionRegisterIsBeneficial(
354 LifetimePosition start) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000355
356 // Returns use position for which register is beneficial in this live
357 // range and which precedes start.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000358 UsePosition* PreviousUsePositionRegisterIsBeneficial(
359 LifetimePosition start) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000360
361 // Can this live range be spilled at this position.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000362 bool CanBeSpilled(LifetimePosition pos) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000363
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000364 // Splitting primitive used by both splitting and splintering members.
365 // Performs the split, but does not link the resulting ranges.
366 // The given position must follow the start of the range.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000367 // All uses following the given position will be moved from this
368 // live range to the result live range.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000369 // The current range will terminate at position, while result will start from
370 // position.
371 UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
372 Zone* zone);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000373
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000374 // Detaches at position, and then links the resulting ranges. Returns the
375 // child, which starts at position.
376 LiveRange* SplitAt(LifetimePosition position, Zone* zone);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000377
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000378 // Returns nullptr when no register is hinted, otherwise sets register_index.
379 UsePosition* FirstHintPosition(int* register_index) const;
380 UsePosition* FirstHintPosition() const {
381 int register_index;
382 return FirstHintPosition(&register_index);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000383 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000384
385 UsePosition* current_hint_position() const {
386 DCHECK(current_hint_position_ == FirstHintPosition());
387 return current_hint_position_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000388 }
389
390 LifetimePosition Start() const {
391 DCHECK(!IsEmpty());
392 return first_interval()->start();
393 }
394
395 LifetimePosition End() const {
396 DCHECK(!IsEmpty());
397 return last_interval_->end();
398 }
399
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000400 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
401 bool CanCover(LifetimePosition position) const;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000402 bool Covers(LifetimePosition position) const;
403 LifetimePosition FirstIntersection(LiveRange* other) const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000404
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000405 void VerifyChildStructure() const {
406 VerifyIntervals();
407 VerifyPositions();
408 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000409
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000410 void ConvertUsesToOperand(const InstructionOperand& op,
411 const InstructionOperand& spill_op);
412 void SetUseHints(int register_index);
413 void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000414
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000415 void Print(const RegisterConfiguration* config, bool with_children) const;
416 void Print(bool with_children) const;
417
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000418 private:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000419 friend class TopLevelLiveRange;
420 explicit LiveRange(int relative_id, MachineRepresentation rep,
421 TopLevelLiveRange* top_level);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400422
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000423 void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);
424
425 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
426
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000427 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
428 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
429 LifetimePosition but_not_past) const;
430
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000431 void VerifyPositions() const;
432 void VerifyIntervals() const;
433
434 typedef BitField<bool, 0, 1> SpilledField;
435 typedef BitField<int32_t, 6, 6> AssignedRegisterField;
436 typedef BitField<MachineRepresentation, 12, 8> RepresentationField;
437
438 // Unique among children and splinters of the same virtual register.
439 int relative_id_;
440 uint32_t bits_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000441 UseInterval* last_interval_;
442 UseInterval* first_interval_;
443 UsePosition* first_pos_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000444 TopLevelLiveRange* top_level_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000445 LiveRange* next_;
446 // This is used as a cache, it doesn't affect correctness.
447 mutable UseInterval* current_interval_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000448 // This is used as a cache, it doesn't affect correctness.
449 mutable UsePosition* last_processed_use_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000450 // This is used as a cache, it's invalid outside of BuildLiveRanges.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000451 mutable UsePosition* current_hint_position_;
452 // Cache the last position splintering stopped at.
453 mutable UsePosition* splitting_pointer_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400454
455 DISALLOW_COPY_AND_ASSIGN(LiveRange);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000456};
457
458
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000459class LiveRangeGroup final : public ZoneObject {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000460 public:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000461 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
462 ZoneVector<LiveRange*>& ranges() { return ranges_; }
463 const ZoneVector<LiveRange*>& ranges() const { return ranges_; }
464
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000465 int assigned_register() const { return assigned_register_; }
466 void set_assigned_register(int reg) { assigned_register_ = reg; }
467
468 private:
469 ZoneVector<LiveRange*> ranges_;
470 int assigned_register_;
471 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
472};
473
474
475class TopLevelLiveRange final : public LiveRange {
476 public:
477 explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
478 int spill_start_index() const { return spill_start_index_; }
479
480 bool IsFixed() const { return vreg_ < 0; }
481
482 bool is_phi() const { return IsPhiField::decode(bits_); }
483 void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
484
485 bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
486 void set_is_non_loop_phi(bool value) {
487 bits_ = IsNonLoopPhiField::update(bits_, value);
488 }
489
490 bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
491 void set_has_slot_use(bool value) {
492 bits_ = HasSlotUseField::update(bits_, value);
493 }
494
495 // Add a new interval or a new use position to this live range.
496 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
497 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
498 void AddUsePosition(UsePosition* pos);
499
500 // Shorten the most recently added interval by setting a new start.
501 void ShortenTo(LifetimePosition start);
502
503 // Detaches between start and end, and attributes the resulting range to
504 // result.
505 // The current range is pointed to as "splintered_from". No parent/child
506 // relationship is established between this and result.
507 void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);
508
509 // Assuming other was splintered from this range, embeds other and its
510 // children as part of the children sequence of this range.
511 void Merge(TopLevelLiveRange* other, Zone* zone);
512
513 // Spill range management.
514 void SetSpillRange(SpillRange* spill_range);
515 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
516 void set_spill_type(SpillType value) {
517 bits_ = SpillTypeField::update(bits_, value);
518 }
519 SpillType spill_type() const { return SpillTypeField::decode(bits_); }
520 InstructionOperand* GetSpillOperand() const {
521 DCHECK(spill_type() == SpillType::kSpillOperand);
522 return spill_operand_;
523 }
524
525 SpillRange* GetAllocatedSpillRange() const {
526 DCHECK(spill_type() != SpillType::kSpillOperand);
527 return spill_range_;
528 }
529
530 SpillRange* GetSpillRange() const {
531 DCHECK(spill_type() == SpillType::kSpillRange);
532 return spill_range_;
533 }
534 bool HasNoSpillType() const {
535 return spill_type() == SpillType::kNoSpillType;
536 }
537 bool HasSpillOperand() const {
538 return spill_type() == SpillType::kSpillOperand;
539 }
540 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }
541
542 AllocatedOperand GetSpillRangeOperand() const;
543
544 void RecordSpillLocation(Zone* zone, int gap_index,
545 InstructionOperand* operand);
546 void SetSpillOperand(InstructionOperand* operand);
547 void SetSpillStartIndex(int start) {
548 spill_start_index_ = Min(start, spill_start_index_);
549 }
550
551 void CommitSpillMoves(InstructionSequence* sequence,
552 const InstructionOperand& operand,
553 bool might_be_duplicated);
554
555 // If all the children of this range are spilled in deferred blocks, and if
556 // for any non-spilled child with a use position requiring a slot, that range
557 // is contained in a deferred block, mark the range as
558 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
559 // and instead let the LiveRangeConnector perform the spills within the
560 // deferred blocks. If so, we insert here spills for non-spilled ranges
561 // with slot use positions.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100562 void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000563 spill_start_index_ = -1;
564 spilled_in_deferred_blocks_ = true;
565 spill_move_insertion_locations_ = nullptr;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100566 list_of_blocks_requiring_spill_operands_ =
567 new (zone) BitVector(total_block_count, zone);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000568 }
569
Ben Murdoch097c5b22016-05-18 11:27:45 +0100570 void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
571 const InstructionOperand& spill_operand,
572 BitVector* necessary_spill_points);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000573
574 TopLevelLiveRange* splintered_from() const { return splintered_from_; }
575 bool IsSplinter() const { return splintered_from_ != nullptr; }
576 bool MayRequireSpillRange() const {
577 DCHECK(!IsSplinter());
578 return !HasSpillOperand() && spill_range_ == nullptr;
579 }
580 void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
581 int vreg() const { return vreg_; }
582
583#if DEBUG
584 int debug_virt_reg() const;
585#endif
586
587 void Verify() const;
588 void VerifyChildrenInOrder() const;
589
590 int GetNextChildId() {
591 return IsSplinter() ? splintered_from()->GetNextChildId()
592 : ++last_child_id_;
593 }
594
595 int GetChildCount() const { return last_child_id_ + 1; }
596
597 bool IsSpilledOnlyInDeferredBlocks() const {
598 return spilled_in_deferred_blocks_;
599 }
600
601 struct SpillMoveInsertionList;
602
Ben Murdoch097c5b22016-05-18 11:27:45 +0100603 SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
604 DCHECK(!IsSpilledOnlyInDeferredBlocks());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000605 return spill_move_insertion_locations_;
606 }
607 TopLevelLiveRange* splinter() const { return splinter_; }
608 void SetSplinter(TopLevelLiveRange* splinter) {
609 DCHECK_NULL(splinter_);
610 DCHECK_NOT_NULL(splinter);
611
612 splinter_ = splinter;
613 splinter->relative_id_ = GetNextChildId();
614 splinter->set_spill_type(spill_type());
615 splinter->SetSplinteredFrom(this);
616 }
617
618 void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
619 bool has_preassigned_slot() const { return has_preassigned_slot_; }
620
Ben Murdoch097c5b22016-05-18 11:27:45 +0100621 void AddBlockRequiringSpillOperand(RpoNumber block_id) {
622 DCHECK(IsSpilledOnlyInDeferredBlocks());
623 GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
624 }
625
626 BitVector* GetListOfBlocksRequiringSpillOperands() const {
627 DCHECK(IsSpilledOnlyInDeferredBlocks());
628 return list_of_blocks_requiring_spill_operands_;
629 }
630
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000631 private:
632 void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);
633
634 typedef BitField<bool, 1, 1> HasSlotUseField;
635 typedef BitField<bool, 2, 1> IsPhiField;
636 typedef BitField<bool, 3, 1> IsNonLoopPhiField;
637 typedef BitField<SpillType, 4, 2> SpillTypeField;
638
639 int vreg_;
640 int last_child_id_;
641 TopLevelLiveRange* splintered_from_;
642 union {
643 // Correct value determined by spill_type()
644 InstructionOperand* spill_operand_;
645 SpillRange* spill_range_;
646 };
Ben Murdoch097c5b22016-05-18 11:27:45 +0100647
648 union {
649 SpillMoveInsertionList* spill_move_insertion_locations_;
650 BitVector* list_of_blocks_requiring_spill_operands_;
651 };
652
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000653 // TODO(mtrofin): generalize spilling after definition, currently specialized
654 // just for spill in a single deferred block.
655 bool spilled_in_deferred_blocks_;
656 int spill_start_index_;
657 UsePosition* last_pos_;
658 TopLevelLiveRange* splinter_;
659 bool has_preassigned_slot_;
660
661 DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
662};
663
664
665struct PrintableLiveRange {
666 const RegisterConfiguration* register_configuration_;
667 const LiveRange* range_;
668};
669
670
671std::ostream& operator<<(std::ostream& os,
672 const PrintableLiveRange& printable_range);
673
674
675class SpillRange final : public ZoneObject {
676 public:
677 static const int kUnassignedSlot = -1;
678 SpillRange(TopLevelLiveRange* range, Zone* zone);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000679
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400680 UseInterval* interval() const { return use_interval_; }
Ben Murdoch61f157c2016-09-16 13:49:30 +0100681
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400682 bool IsEmpty() const { return live_ranges_.empty(); }
683 bool TryMerge(SpillRange* other);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000684 bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }
685
686 void set_assigned_slot(int index) {
687 DCHECK_EQ(kUnassignedSlot, assigned_slot_);
688 assigned_slot_ = index;
689 }
690 int assigned_slot() {
691 DCHECK_NE(kUnassignedSlot, assigned_slot_);
692 return assigned_slot_;
693 }
694 const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
695 return live_ranges_;
696 }
697 ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
698 int byte_width() const { return byte_width_; }
699 RegisterKind kind() const { return kind_; }
700 void Print() const;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000701
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400702 private:
703 LifetimePosition End() const { return end_position_; }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400704 bool IsIntersectingWith(SpillRange* other) const;
705 // Merge intervals, making sure the use intervals are sorted
706 void MergeDisjointIntervals(UseInterval* other);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000707
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000708 ZoneVector<TopLevelLiveRange*> live_ranges_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400709 UseInterval* use_interval_;
710 LifetimePosition end_position_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000711 int assigned_slot_;
712 int byte_width_;
713 RegisterKind kind_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000714
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400715 DISALLOW_COPY_AND_ASSIGN(SpillRange);
716};
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000717
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400718
// Central container for all state shared by the register-allocation phases
// (ConstraintBuilder, LiveRangeBuilder, the allocators, OperandAssigner,
// ReferenceMapPopulator and LiveRangeConnector below). Owns the live ranges,
// liveness bit sets, spill ranges and phi bookkeeping.
class RegisterAllocationData final : public ZoneObject {
 public:
  // Per-phi bookkeeping: the phi instruction, its block, the operands that
  // flow into it, and (for hinting) the register eventually assigned to it.
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    // May only be called while no register is assigned yet.
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    // Records an operand feeding this phi; defined in the .cc file.
    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  // Keyed by virtual register number (see GetPhiMapValueFor(int)).
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  // A reference-map entry whose operand must be patched later, once
  // assignments are known.
  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  // Pairs a live range with the spill slot pre-assigned to it.
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  // All live ranges, indexed by virtual register.
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Fixed ranges model the lifetimes of physical registers, split by kind:
  // general, float and double (see the corresponding members below).
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  // Adds a move between |from| and |to| in the gap of instruction |index|.
  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  // True if the range's virtual register holds a tagged (GC-visible) value.
  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  // Consistency checks; defined in the .cc file.
  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  // Records that the register |index| of representation |rep| got assigned.
  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};
854
855
// Runs the first two allocation phases: materializing fixed-register
// constraints as moves, and deconstructing SSA form by lowering phis.
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Rewrites |operand| at instruction |pos| to its fixed location;
  // NOTE(review): |is_tagged| presumably feeds reference-map bookkeeping —
  // confirm against the definition in the .cc file.
  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
885
886
// Phase 3 of allocation: performs liveness analysis over the instruction
// sequence and builds the live ranges, use intervals and use positions that
// the allocators consume.
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  // Fixed (physical-register) ranges get negative ids, disjoint from the
  // non-negative virtual-register ids.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  // Register-hinting support for phi inputs/outputs.
  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  // Convenience overload: a use position without any hint.
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  // Convenience overload: a definition without any hint.
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  // Convenience overload: a use without any hint.
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
952
953
954class RegisterAllocator : public ZoneObject {
955 public:
Ben Murdoch61f157c2016-09-16 13:49:30 +0100956 RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000957
958 protected:
959 RegisterAllocationData* data() const { return data_; }
960 InstructionSequence* code() const { return data()->code(); }
961 RegisterKind mode() const { return mode_; }
962 int num_registers() const { return num_registers_; }
963 int num_allocatable_registers() const { return num_allocatable_registers_; }
Ben Murdoch61f157c2016-09-16 13:49:30 +0100964 const int* allocatable_register_codes() const {
965 return allocatable_register_codes_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000966 }
967
968 // TODO(mtrofin): explain why splitting in gap START is always OK.
969 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
970 int instruction_index);
971
972 Zone* allocation_zone() const { return data()->allocation_zone(); }
973
974 // Find the optimal split for ranges defined by a memory operand, e.g.
975 // constants or function parameters passed on the stack.
976 void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);
977
978 // Split the given range at the given position.
979 // If range starts at or after the given position then the
980 // original range is returned.
981 // Otherwise returns the live range that starts at pos and contains
982 // all uses from the original range that follow pos. Uses at pos will
983 // still be owned by the original range after splitting.
984 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
985
986 bool CanProcessRange(LiveRange* range) const {
987 return range != nullptr && !range->IsEmpty() && range->kind() == mode();
988 }
989
990
991 // Split the given range in a position from the interval [start, end].
992 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
993 LifetimePosition end);
994
995 // Find a lifetime position in the interval [start, end] which
996 // is optimal for splitting: it is either header of the outermost
997 // loop covered by this interval or the latest possible position.
998 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
999 LifetimePosition end);
1000
1001 void Spill(LiveRange* range);
1002
1003 // If we are trying to spill a range inside the loop try to
1004 // hoist spill position out to the point just before the loop.
1005 LifetimePosition FindOptimalSpillingPos(LiveRange* range,
1006 LifetimePosition pos);
1007
1008 const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
1009 const char* RegisterName(int allocation_index) const;
1010
1011 private:
1012 RegisterAllocationData* const data_;
1013 const RegisterKind mode_;
1014 const int num_registers_;
1015 int num_allocatable_registers_;
1016 const int* allocatable_register_codes_;
1017
Ben Murdoch61f157c2016-09-16 13:49:30 +01001018 private:
1019 bool no_combining_;
1020
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001021 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
1022};
1023
1024
// Phase 4 of allocation: linear-scan register assignment. Maintains the
// classic unhandled/active/inactive work lists while sweeping the ranges.
class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  // Ranges not yet processed by the scan (kept sorted, see SortUnhandled).
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  // Debug-only: tracks the scan position to check monotonic progress.
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
1085
1086
// Determines where spill slots are needed.
// NOTE(review): purpose inferred from the name; LocateSpillSlots is defined
// in the .cc file — confirm details there.
class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};
1100
1101
// Runs phases 5 and 6: picks concrete spill slots and writes the final
// operand assignments back into the instruction sequence.
class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};
1119
1120
// Phase 7 of allocation: fills in the reference (pointer) maps used by the
// GC, based on the final operand assignments.
class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  // Sanity check used by the implementation in the .cc file.
  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};
1137
1138
class LiveRangeBoundArray;  // Defined in the .cc file.
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  // Connects |cur_op| in |block| with |pred_op| coming from predecessor
  // |pred|; returns an int whose meaning is defined in the .cc file.
  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
1180
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001181} // namespace compiler
1182} // namespace internal
1183} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001184
1185#endif // V8_REGISTER_ALLOCATOR_H_