// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)
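
// Note: TRACE only prints when the trace_alloc flag (FLAG_trace_alloc) is
// enabled, so e.g. TRACE("Add to live range %d\n", vreg()) is a no-op in
// normal runs.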


namespace {

void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
  auto it = std::find(v->begin(), v->end(), range);
  DCHECK(it != v->end());
  v->erase(it);
}

int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_double_registers()
                              : cfg->num_general_registers();
}


int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_allocatable_aliased_double_registers()
                              : cfg->num_allocatable_general_registers();
}


const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
                                       RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
                              : cfg->allocatable_general_codes();
}


const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}


const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}


Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}


bool IsOutputRegisterOf(Instruction* instr, Register reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsRegister() &&
        LocationOperand::cast(output)->GetRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsFPRegister() &&
        LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


// TODO(dcarney): fix frame to allow frame accesses to half size location.
int GetByteWidth(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kTagged:
      return kPointerSize;
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kFloat64:
      return 8;
    case MachineRepresentation::kSimd128:
      return 16;
    case MachineRepresentation::kNone:
      break;
  }
  UNREACHABLE();
  return 0;
}
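
// Note that GetByteWidth gives kFloat32 a full 8-byte slot (see the TODO
// above about half-size frame accesses), kSimd128 a 16-byte slot, and the
// tagged/word representations one pointer-sized slot.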

}  // namespace

class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;

 private:
  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
};


struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};


class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}

  bool ShouldInitialize() { return start_ == nullptr; }

  void Initialize(Zone* zone, TopLevelLiveRange* range) {
    length_ = range->GetChildCount();

    start_ = zone->NewArray<LiveRangeBound>(length_);
    LiveRangeBound* curr = start_;
    // Normally, spilled ranges do not need connecting moves, because the spill
    // location has been assigned at definition. For ranges spilled in deferred
    // blocks, that is not the case, so we need to connect the spilled children.
    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
      new (curr) LiveRangeBound(i, i->spilled());
    }
  }

  LiveRangeBound* Find(const LifetimePosition position) const {
    size_t left_index = 0;
    size_t right_index = length_;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      DCHECK(right_index > current_index);
      LiveRangeBound* bound = &start_[current_index];
      if (bound->start_ <= position) {
        if (position < bound->end_) return bound;
        DCHECK(left_index < current_index);
        left_index = current_index;
      } else {
        right_index = current_index;
      }
    }
  }
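
  // Find() above is a binary search over the children of one top-level range,
  // which are stored in order of their start positions. It assumes the
  // position is covered by one of the bounds, so the loop always terminates
  // by returning that bound.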

  LiveRangeBound* FindPred(const InstructionBlock* pred) {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    return Find(pred_end);
  }

  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
        succ->first_instruction_index());
    return Find(succ_start);
  }

  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    LiveRangeBound* bound = Find(pred_end);
    result->pred_cover_ = bound->range_;
    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());

    if (bound->CanCover(cur_start)) {
      // Both blocks are covered by the same range, so there is nothing to
      // connect.
      return false;
    }
    bound = Find(cur_start);
    if (bound->skip_) {
      return false;
    }
    result->cur_cover_ = bound->range_;
    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
    return (result->cur_cover_ != result->pred_cover_);
  }

 private:
  size_t length_;
  LiveRangeBound* start_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
};


class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
      : data_(data),
        bounds_length_(static_cast<int>(data_->live_ranges().size())),
        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
        zone_(zone) {
    for (int i = 0; i < bounds_length_; ++i) {
      new (&bounds_[i]) LiveRangeBoundArray();
    }
  }

  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
    DCHECK(range != nullptr && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(zone_, range);
    }
    return array;
  }

 private:
  const RegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
};


typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;


struct DelayedInsertionMapCompare {
  bool operator()(const DelayedInsertionMapKey& a,
                  const DelayedInsertionMapKey& b) const {
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  }
};


typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
                DelayedInsertionMapCompare> DelayedInsertionMap;


UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         void* hint, UsePositionHintType hint_type)
    : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
  DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kAny;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
    if (unalloc->HasRegisterPolicy()) {
      type = UsePositionType::kRequiresRegister;
    } else if (unalloc->HasSlotPolicy()) {
      type = UsePositionType::kRequiresSlot;
      register_beneficial = false;
    } else {
      register_beneficial = !unalloc->HasAnyPolicy();
    }
  }
  flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
           RegisterBeneficialField::encode(register_beneficial) |
           AssignedRegisterField::encode(kUnassignedRegister);
  DCHECK(pos_.IsValid());
}
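
// The constructor above packs the use type, the hint type, the
// register-beneficial bit and the (initially unassigned) register into the
// single flags_ bit field; set_type() further down re-encodes the same fields
// while preserving the hint type.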


bool UsePosition::HasHint() const {
  int hint_register;
  return HintRegister(&hint_register);
}


bool UsePosition::HintRegister(int* register_code) const {
  if (hint_ == nullptr) return false;
  switch (HintTypeField::decode(flags_)) {
    case UsePositionHintType::kNone:
    case UsePositionHintType::kUnresolved:
      return false;
    case UsePositionHintType::kUsePos: {
      UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
      int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kOperand: {
      InstructionOperand* operand =
          reinterpret_cast<InstructionOperand*>(hint_);
      int assigned_register =
          operand->IsRegister()
              ? LocationOperand::cast(operand)->GetRegister().code()
              : LocationOperand::cast(operand)->GetDoubleRegister().code();
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kPhi: {
      RegisterAllocationData::PhiMapValue* phi =
          reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
      int assigned_register = phi->assigned_register();
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
  }
  UNREACHABLE();
  return false;
}


UsePositionHintType UsePosition::HintTypeForOperand(
    const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::CONSTANT:
    case InstructionOperand::IMMEDIATE:
    case InstructionOperand::EXPLICIT:
      return UsePositionHintType::kNone;
    case InstructionOperand::UNALLOCATED:
      return UsePositionHintType::kUnresolved;
    case InstructionOperand::ALLOCATED:
      if (op.IsRegister() || op.IsFPRegister()) {
        return UsePositionHintType::kOperand;
      } else {
        DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
        return UsePositionHintType::kNone;
      }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
  return UsePositionHintType::kNone;
}


void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}


void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
  DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
  DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
  flags_ = TypeField::encode(type) |
           RegisterBeneficialField::encode(register_beneficial) |
           HintTypeField::encode(HintTypeField::decode(flags_)) |
           AssignedRegisterField::encode(kUnassignedRegister);
}


UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  UseInterval* after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}
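
// SplitAt leaves [start, pos) in this interval and returns a new
// zone-allocated interval covering [pos, end); e.g. splitting [2, 10) at 6
// keeps [2, 6) here and returns [6, 10).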


void LifetimePosition::Print() const {
  OFStream os(stdout);
  os << *this << std::endl;
}


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
  os << '@' << pos.ToInstructionIndex();
  if (pos.IsGapPosition()) {
    os << 'g';
  } else {
    os << 'i';
  }
  if (pos.IsStart()) {
    os << 's';
  } else {
    os << 'e';
  }
  return os;
}


const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();


LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      splitting_pointer_(nullptr),
      size_(kInvalidSize),
      weight_(kInvalidWeight),
      group_(nullptr) {
  DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          RepresentationField::encode(rep);
}


void LiveRange::VerifyPositions() const {
  // Walk the positions, verifying that each is in an interval.
  UseInterval* interval = first_interval_;
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    CHECK(Start() <= pos->pos());
    CHECK(pos->pos() <= End());
    CHECK_NOT_NULL(interval);
    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
      interval = interval->next();
      CHECK_NOT_NULL(interval);
    }
  }
}


void LiveRange::VerifyIntervals() const {
  DCHECK(first_interval()->start() == Start());
  LifetimePosition last_end = first_interval()->end();
  for (UseInterval* interval = first_interval()->next(); interval != nullptr;
       interval = interval->next()) {
    DCHECK(last_end <= interval->start());
    last_end = interval->end();
  }
  DCHECK(last_end == End());
}


void LiveRange::set_assigned_register(int reg) {
  DCHECK(!HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, reg);
}


void LiveRange::UnsetAssignedRegister() {
  DCHECK(HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


void LiveRange::Spill() {
  DCHECK(!spilled());
  DCHECK(!TopLevel()->HasNoSpillType());
  set_spilled(true);
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


RegisterKind LiveRange::kind() const {
  return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
}


UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    if (pos->HintRegister(register_index)) return pos;
  }
  return nullptr;
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == nullptr || use_pos->pos() > start) {
    use_pos = first_pos();
  }
  while (use_pos != nullptr && use_pos->pos() < start) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = first_pos();
  UsePosition* prev = nullptr;
  while (pos != nullptr && pos->pos() < start) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
  for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
       pos = pos->next()) {
    if (pos->type() != UsePositionType::kRequiresSlot) continue;
    return pos;
  }
  return nullptr;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == nullptr) return true;
  return use_pos->pos() > pos.NextStart().End();
}
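
// CanBeSpilled() thus allows spilling only when the next register-requiring
// use lies strictly beyond the end of the next lifetime position, leaving a
// gap in which a reload can be placed before that use.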


bool LiveRange::IsTopLevel() const { return top_level_ == this; }


InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    return AllocatedOperand(LocationOperand::REGISTER, representation(),
                            assigned_register());
  }
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    InstructionOperand* op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  LifetimePosition start = current_interval_ == nullptr
                               ? LifetimePosition::Invalid()
                               : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}


LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
  DetachAt(position, child, zone);

  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;
  return child;
}


UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
                                 Zone* zone) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK(nullptr != after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed to
    // the split child, because the split child owns the use interval covering
    // it.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to a use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  // Invalidate size and weight of this range. The child range has them
  // invalid at construction.
  size_ = kInvalidSize;
  weight_ = kInvalidWeight;
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}
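
// DetachAt() splits both the interval chain and the use-position chain at
// `position`: everything from `position` onwards moves into `result`, this
// range keeps the prefix, and the last use position remaining in the prefix
// is returned (Splinter below uses that return value to stitch the lists
// back together).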


void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
  for (; child != nullptr; child = child->next()) {
    child->top_level_ = new_top_level;
  }
}


void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        DCHECK(op.IsRegister() || op.IsFPRegister());
      // Fall through.
      case UsePositionType::kAny:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}


void LiveRange::SetUseHints(int register_index) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        break;
      case UsePositionType::kRequiresRegister:
      case UsePositionType::kAny:
        pos->set_assigned_register(register_index);
        break;
    }
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start() <= position && position < End();
}


bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
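
// FirstIntersection() is a linear sweep over two interval lists that are both
// sorted by start position: whichever interval starts earlier is advanced, so
// each list is traversed at most once.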


unsigned LiveRange::GetSize() {
  if (size_ == kInvalidSize) {
    size_ = 0;
    for (const UseInterval* interval = first_interval(); interval != nullptr;
         interval = interval->next()) {
      size_ += (interval->end().value() - interval->start().value());
    }
  }

  return static_cast<unsigned>(size_);
}


void LiveRange::Print(const RegisterConfiguration* config,
                      bool with_children) const {
  OFStream os(stdout);
  PrintableLiveRange wrapper;
  wrapper.register_configuration_ = config;
  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    wrapper.range_ = i;
    os << wrapper << std::endl;
    if (!with_children) break;
  }
}


void LiveRange::Print(bool with_children) const {
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN);
  Print(config, with_children);
}


struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};


TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}


#if DEBUG
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif


void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                            InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      gap_index, operand, spill_move_insertion_locations_);
}

void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}


void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}


void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}


AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  SpillRange* spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
}


void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    DetachAt(start, &splinter_temp, zone);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    UsePosition* last = DetachAt(start, &splinter_temp, zone);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    last_in_splinter = splinter_temp.DetachAt(end, &end_part, zone);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}
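
// Splinter() carves the [start, end) portion out of this range and appends it
// to the splinter() range: the head before `start` stays here, and when `end`
// is before End() the tail from `end` onwards is re-attached to this range,
// so only the middle part moves.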


void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  splintered_from_ = splinter_parent;
  if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}


void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    merged->spill_range_ = nullptr;
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}


void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());
    // If first and second intersect, split first.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);

#if DEBUG
  Verify();
#endif
}
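
// Merge() interleaves the splinter's children back into this range's child
// chain in start order, splitting a child whenever it overlaps one coming
// from the splinter, and finally repoints every child at this top level.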


void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}


void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    VerifyChildStructure();
  }
}


void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK(first_interval_ != nullptr);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}


void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    last_interval_ = new_interval;
  }
}


void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      DCHECK(start < first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}
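
// Because intervals arrive in non-increasing start order (see
// ProcessInstructions), AddUseInterval only ever prepends to or extends
// first_interval_ and never has to search the whole chain.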


void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}


static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                        UseInterval* interval2) {
  while (interval1 != nullptr && interval2 != nullptr) {
    if (interval1->start() < interval2->start()) {
      if (interval1->end() > interval2->start()) {
        return true;
      }
      interval1 = interval1->next();
    } else {
      if (interval2->end() > interval1->start()) {
        return true;
      }
      interval2 = interval2->next();
    }
  }
  return false;
}


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range) {
  const LiveRange* range = printable_range.range_;
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
     << " ";
  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;
  UseInterval* interval = range->first_interval();
  UsePosition* use_pos = range->first_pos();
  PrintableInstructionOperand pio;
  pio.register_configuration_ = printable_range.register_configuration_;
  while (use_pos != nullptr) {
    if (use_pos->HasOperand()) {
      pio.op_ = *use_pos->operand();
      os << pio << use_pos->pos() << " ";
    }
    use_pos = use_pos->next();
  }
  os << std::endl;

  while (interval != nullptr) {
    os << '[' << interval->start() << ", " << interval->end() << ')'
       << std::endl;
    interval = interval->next();
  }
  os << "}";
  return os;
}


SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())),
      kind_(parent->kind()) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;
  UseInterval* node = nullptr;
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  end_position_ = node->end();
  parent->SetSpillRange(this);
}


int SpillRange::ByteWidth() const {
  return GetByteWidth(live_ranges_[0]->representation());
}


bool SpillRange::IsIntersectingWith(SpillRange* other) const {
  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
      this->End() <= other->use_interval_->start() ||
      other->End() <= this->use_interval_->start()) {
    return false;
  }
  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
}


bool SpillRange::TryMerge(SpillRange* other) {
  if (HasSlot() || other->HasSlot()) return false;
  // TODO(dcarney): byte widths should be compared here not kinds.
  if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() ||
      IsIntersectingWith(other)) {
    return false;
  }

  LifetimePosition max = LifetimePosition::MaxPosition();
  if (End() < other->End() && other->End() != max) {
    end_position_ = other->End();
  }
  other->end_position_ = max;

  MergeDisjointIntervals(other->use_interval_);
  other->use_interval_ = nullptr;

  for (TopLevelLiveRange* range : other->live_ranges()) {
    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);
  }

  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
                       other->live_ranges().end());
  other->live_ranges().clear();

  return true;
}


void SpillRange::MergeDisjointIntervals(UseInterval* other) {
  UseInterval* tail = nullptr;
  UseInterval* current = use_interval_;
  while (other != nullptr) {
    // Make sure the 'current' list starts first.
    if (current == nullptr || current->start() > other->start()) {
      std::swap(current, other);
    }
    // Check disjointness.
    DCHECK(other == nullptr || current->end() <= other->start());
    // Append the 'current' node to the result accumulator and move forward.
    if (tail == nullptr) {
      use_interval_ = current;
    } else {
      tail->set_next(current);
    }
    tail = current;
    current = current->next();
  }
  // Other list is empty => we are done.
}
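
// MergeDisjointIntervals() is a standard merge of two sorted, mutually
// disjoint interval lists: at every step the list whose head starts earlier
// contributes the next node to the merged chain.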
1350
1351
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001352void SpillRange::Print() const {
1353 OFStream os(stdout);
1354 os << "{" << std::endl;
1355 for (TopLevelLiveRange* range : live_ranges()) {
1356 os << range->vreg() << " ";
1357 }
1358 os << std::endl;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001359
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001360 for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
1361 os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
1362 }
1363 os << "}" << std::endl;
1364}
1365
1366
1367RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
1368 const InstructionBlock* block,
1369 Zone* zone)
1370 : phi_(phi),
1371 block_(block),
1372 incoming_operands_(zone),
1373 assigned_register_(kUnassignedRegister) {
1374 incoming_operands_.reserve(phi->operands().size());
1375}
1376
1377
1378void RegisterAllocationData::PhiMapValue::AddOperand(
1379 InstructionOperand* operand) {
1380 incoming_operands_.push_back(operand);
1381}
1382
1383
1384void RegisterAllocationData::PhiMapValue::CommitAssignment(
1385 const InstructionOperand& assigned) {
1386 for (InstructionOperand* operand : incoming_operands_) {
1387 InstructionOperand::ReplaceWith(operand, &assigned);
1388 }
1389}
1390
1391
1392RegisterAllocationData::RegisterAllocationData(
1393 const RegisterConfiguration* config, Zone* zone, Frame* frame,
1394 InstructionSequence* code, const char* debug_name)
1395 : allocation_zone_(zone),
1396 frame_(frame),
1397 code_(code),
1398 debug_name_(debug_name),
1399 config_(config),
1400 phi_map_(allocation_zone()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001401 live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1402 live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1403 live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
1404 allocation_zone()),
1405 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
1406 allocation_zone()),
1407 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
1408 allocation_zone()),
1409 spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
1410 delayed_references_(allocation_zone()),
1411 assigned_registers_(nullptr),
1412 assigned_double_registers_(nullptr),
1413 virtual_register_count_(code->VirtualRegisterCount()),
1414 preassigned_slot_ranges_(zone) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001415 assigned_registers_ = new (code_zone())
1416 BitVector(this->config()->num_general_registers(), code_zone());
1417 assigned_double_registers_ = new (code_zone())
1418 BitVector(this->config()->num_double_registers(), code_zone());
1419 this->frame()->SetAllocatedRegisters(assigned_registers_);
1420 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1421}
1422
1423
1424MoveOperands* RegisterAllocationData::AddGapMove(
1425 int index, Instruction::GapPosition position,
1426 const InstructionOperand& from, const InstructionOperand& to) {
1427 Instruction* instr = code()->InstructionAt(index);
1428 ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1429 return moves->AddMove(from, to);
1430}
1431
1432
1433MachineRepresentation RegisterAllocationData::RepresentationFor(
1434 int virtual_register) {
1435 DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1436 return code()->GetRepresentation(virtual_register);
1437}
1438
1439
1440TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1441 if (index >= static_cast<int>(live_ranges().size())) {
1442 live_ranges().resize(index + 1, nullptr);
1443 }
1444 TopLevelLiveRange* result = live_ranges()[index];
1445 if (result == nullptr) {
1446 result = NewLiveRange(index, RepresentationFor(index));
1447 live_ranges()[index] = result;
1448 }
1449 return result;
1450}
1451
1452
1453TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1454 int index, MachineRepresentation rep) {
1455 return new (allocation_zone()) TopLevelLiveRange(index, rep);
1456}
1457
1458
1459int RegisterAllocationData::GetNextLiveRangeId() {
1460 int vreg = virtual_register_count_++;
1461 if (vreg >= static_cast<int>(live_ranges().size())) {
1462 live_ranges().resize(vreg + 1, nullptr);
1463 }
1464 return vreg;
1465}
1466
1467
1468TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1469 MachineRepresentation rep) {
1470 int vreg = GetNextLiveRangeId();
1471 TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1472 return ret;
1473}
1474
1475
1476RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1477 const InstructionBlock* block, PhiInstruction* phi) {
1478 RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1479 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1480 auto res =
1481 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1482 DCHECK(res.second);
1483 USE(res);
1484 return map_value;
1485}
1486
1487
1488RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1489 int virtual_register) {
1490 auto it = phi_map_.find(virtual_register);
1491 DCHECK(it != phi_map_.end());
1492 return it->second;
1493}
1494
1495
1496RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1497 TopLevelLiveRange* top_range) {
1498 return GetPhiMapValueFor(top_range->vreg());
1499}
1500
1501
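// Sanity check: any virtual register still live on entry to the first block
// was used without ever being defined. Prints diagnostics for each offender
// and returns whether at least one was found.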
1502bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1503 bool found = false;
1504 BitVector::Iterator iterator(live_in_sets()[0]);
1505 while (!iterator.Done()) {
1506 found = true;
1507 int operand_index = iterator.Current();
1508 PrintF("Register allocator error: live v%d reached first block.\n",
1509 operand_index);
1510 LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1511 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1512 if (debug_name() == nullptr) {
1513 PrintF("\n");
1514 } else {
1515 PrintF(" (function: %s)\n", debug_name());
1516 }
1517 iterator.Advance();
1518 }
1519 return found;
1520}
1521
1522
1523// If a range is defined in a deferred block, we expect the entire range
1524// to cover only positions in deferred blocks. Otherwise, a block on the
1525// hot path would be dominated by a deferred block, meaning it would be
1526// unreachable without passing through the deferred block, a contradiction.
1527// In particular, when such a range contributes a result back to the hot
1528// path, it does so as one of the inputs of a phi. In that case, the value
1529// is transferred via a move at the Gap::END of the last instruction of a
1530// deferred block.
1531bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1532 for (const TopLevelLiveRange* range : live_ranges()) {
1533 if (range == nullptr || range->IsEmpty() ||
1534 !code()
1535 ->GetInstructionBlock(range->Start().ToInstructionIndex())
1536 ->IsDeferred()) {
1537 continue;
1538 }
1539 for (const UseInterval* i = range->first_interval(); i != nullptr;
1540 i = i->next()) {
1541 int first = i->FirstGapIndex();
1542 int last = i->LastGapIndex();
1543 for (int instr = first; instr <= last;) {
1544 const InstructionBlock* block = code()->GetInstructionBlock(instr);
1545 if (!block->IsDeferred()) return false;
1546 instr = block->last_instruction_index() + 1;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001547 }
1548 }
1549 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550 return true;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001551}
1552
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001553SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1554 TopLevelLiveRange* range) {
1555 DCHECK(!range->HasSpillOperand());
1556
1557 SpillRange* spill_range = range->GetAllocatedSpillRange();
1558 if (spill_range == nullptr) {
1559 DCHECK(!range->IsSplinter());
1560 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001561 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001562 range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001563
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001564 int spill_range_index =
1565 range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001566
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001567 spill_ranges()[spill_range_index] = spill_range;
1568
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001569 return spill_range;
1570}
1571
1572
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001573SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1574 TopLevelLiveRange* range) {
1575 DCHECK(!range->HasSpillOperand());
1576 DCHECK(!range->IsSplinter());
1577 SpillRange* spill_range =
1578 new (allocation_zone()) SpillRange(range, allocation_zone());
1579 return spill_range;
1580}
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001581
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001582
1583void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
Ben Murdochc5610432016-08-08 18:44:38 +01001584 if (kind == FP_REGISTERS) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001585 assigned_double_registers_->Add(index);
1586 } else {
1587 DCHECK(kind == GENERAL_REGISTERS);
1588 assigned_registers_->Add(index);
1589 }
1590}
1591
1592
1593bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1594 return pos.IsFullStart() &&
1595 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1596 pos.ToInstructionIndex();
1597}
1598
1599
1600ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1601 : data_(data) {}
1602
1603
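// Rewrites 'operand' in place into the concrete register or stack slot
// demanded by its fixed policy. If 'is_tagged', the new location is also
// recorded in the reference map of the instruction at 'pos' (when present).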
1604InstructionOperand* ConstraintBuilder::AllocateFixed(
1605 UnallocatedOperand* operand, int pos, bool is_tagged) {
1606 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1607 DCHECK(operand->HasFixedPolicy());
1608 InstructionOperand allocated;
1609 MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1610 int virtual_register = operand->virtual_register();
1611 if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1612 rep = data()->RepresentationFor(virtual_register);
1613 }
1614 if (operand->HasFixedSlotPolicy()) {
1615 allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1616 operand->fixed_slot_index());
1617 } else if (operand->HasFixedRegisterPolicy()) {
1618 DCHECK(!IsFloatingPoint(rep));
1619 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1620 operand->fixed_register_index());
1621 } else if (operand->HasFixedDoubleRegisterPolicy()) {
1622 DCHECK(IsFloatingPoint(rep));
1623 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1624 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1625 operand->fixed_register_index());
1626 } else {
1627 UNREACHABLE();
1628 }
1629 InstructionOperand::ReplaceWith(operand, &allocated);
1630 if (is_tagged) {
1631 TRACE("Fixed reg is tagged at %d\n", pos);
1632 Instruction* instr = code()->InstructionAt(pos);
1633 if (instr->HasReferenceMap()) {
1634 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1635 }
1636 }
1637 return operand;
1638}
1639
1640
1641void ConstraintBuilder::MeetRegisterConstraints() {
1642 for (InstructionBlock* block : code()->instruction_blocks()) {
1643 MeetRegisterConstraints(block);
1644 }
1645}
1646
1647
1648void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1649 int start = block->first_instruction_index();
1650 int end = block->last_instruction_index();
1651 DCHECK_NE(-1, start);
1652 for (int i = start; i <= end; ++i) {
1653 MeetConstraintsBefore(i);
1654 if (i != end) MeetConstraintsAfter(i);
1655 }
1656  // Meet register constraints for the last instruction in the block.
1657 MeetRegisterConstraintsForLastInstructionInBlock(block);
1658}
1659
1660
1661void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1662 const InstructionBlock* block) {
1663 int end = block->last_instruction_index();
1664 Instruction* last_instruction = code()->InstructionAt(end);
1665 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1666 InstructionOperand* output_operand = last_instruction->OutputAt(i);
1667 DCHECK(!output_operand->IsConstant());
1668 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1669 int output_vreg = output->virtual_register();
1670 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1671 bool assigned = false;
1672 if (output->HasFixedPolicy()) {
1673 AllocateFixed(output, -1, false);
1674      // This value is produced on the stack; we never need to spill it.
1675 if (output->IsStackSlot()) {
1676 DCHECK(LocationOperand::cast(output)->index() <
1677 data()->frame()->GetSpillSlotCount());
1678 range->SetSpillOperand(LocationOperand::cast(output));
1679 range->SetSpillStartIndex(end);
1680 assigned = true;
1681 }
1682
1683 for (const RpoNumber& succ : block->successors()) {
1684 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1685 DCHECK(successor->PredecessorCount() == 1);
1686 int gap_index = successor->first_instruction_index();
1687 // Create an unconstrained operand for the same virtual register
1688 // and insert a gap move from the fixed output to the operand.
1689 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1690 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1691 }
1692 }
1693
1694 if (!assigned) {
1695 for (const RpoNumber& succ : block->successors()) {
1696 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1697 DCHECK(successor->PredecessorCount() == 1);
1698 int gap_index = successor->first_instruction_index();
1699 range->RecordSpillLocation(allocation_zone(), gap_index, output);
1700 range->SetSpillStartIndex(gap_index);
1701 }
1702 }
1703 }
1704}
1705
1706
1707void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1708 Instruction* first = code()->InstructionAt(instr_index);
1709 // Handle fixed temporaries.
1710 for (size_t i = 0; i < first->TempCount(); i++) {
1711 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1712 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1713 }
1714 // Handle constant/fixed output operands.
1715 for (size_t i = 0; i < first->OutputCount(); i++) {
1716 InstructionOperand* output = first->OutputAt(i);
1717 if (output->IsConstant()) {
1718 int output_vreg = ConstantOperand::cast(output)->virtual_register();
1719 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1720 range->SetSpillStartIndex(instr_index + 1);
1721 range->SetSpillOperand(output);
1722 continue;
1723 }
1724 UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1725 TopLevelLiveRange* range =
1726 data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1727 bool assigned = false;
1728 if (first_output->HasFixedPolicy()) {
1729 int output_vreg = first_output->virtual_register();
1730 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1731 bool is_tagged = code()->IsReference(output_vreg);
1732 if (first_output->HasSecondaryStorage()) {
1733 range->MarkHasPreassignedSlot();
1734 data()->preassigned_slot_ranges().push_back(
1735 std::make_pair(range, first_output->GetSecondaryStorage()));
1736 }
1737 AllocateFixed(first_output, instr_index, is_tagged);
1738
1739      // This value is produced on the stack; we never need to spill it.
1740 if (first_output->IsStackSlot()) {
1741 DCHECK(LocationOperand::cast(first_output)->index() <
1742 data()->frame()->GetTotalFrameSlotCount());
1743 range->SetSpillOperand(LocationOperand::cast(first_output));
1744 range->SetSpillStartIndex(instr_index + 1);
1745 assigned = true;
1746 }
1747 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1748 output_copy);
1749 }
1750 // Make sure we add a gap move for spilling (if we have not done
1751 // so already).
1752 if (!assigned) {
1753 range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1754 first_output);
1755 range->SetSpillStartIndex(instr_index + 1);
1756 }
1757 }
1758}
1759
1760
1761void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1762 Instruction* second = code()->InstructionAt(instr_index);
1763 // Handle fixed input operands of second instruction.
1764 for (size_t i = 0; i < second->InputCount(); i++) {
1765 InstructionOperand* input = second->InputAt(i);
1766 if (input->IsImmediate() || input->IsExplicit()) {
1767 continue; // Ignore immediates and explicitly reserved registers.
1768 }
1769 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1770 if (cur_input->HasFixedPolicy()) {
1771 int input_vreg = cur_input->virtual_register();
1772 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1773 bool is_tagged = code()->IsReference(input_vreg);
1774 AllocateFixed(cur_input, instr_index, is_tagged);
1775 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1776 }
1777 }
1778 // Handle "output same as input" for second instruction.
1779 for (size_t i = 0; i < second->OutputCount(); i++) {
1780 InstructionOperand* output = second->OutputAt(i);
1781 if (!output->IsUnallocated()) continue;
1782 UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1783 if (!second_output->HasSameAsInputPolicy()) continue;
1784 DCHECK(i == 0); // Only valid for first output.
1785 UnallocatedOperand* cur_input =
1786 UnallocatedOperand::cast(second->InputAt(0));
1787 int output_vreg = second_output->virtual_register();
1788 int input_vreg = cur_input->virtual_register();
1789 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1790 cur_input->set_virtual_register(second_output->virtual_register());
1791 MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1792 input_copy, *cur_input);
1793 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1794 if (second->HasReferenceMap()) {
1795 RegisterAllocationData::DelayedReference delayed_reference = {
1796 second->reference_map(), &gap_move->source()};
1797 data()->delayed_references().push_back(delayed_reference);
1798 }
1799 } else if (!code()->IsReference(input_vreg) &&
1800 code()->IsReference(output_vreg)) {
1801      // The input is assumed to take on a tagged representation immediately,
1802      // i.e. before the pointer map can be used. In other words, the pointer
1803      // map at the instruction will include the output operand (whose value at
1804      // the beginning of the instruction is equal to the input operand). If
1805      // this is not desired, then the pointer map at this instruction needs
1806      // to be adjusted manually.
1807 }
1808 }
1809}
1810
1811
1812void ConstraintBuilder::ResolvePhis() {
1813 // Process the blocks in reverse order.
1814 for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1815 ResolvePhis(block);
1816 }
1817}
1818
1819
1820void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1821 for (PhiInstruction* phi : block->phis()) {
1822 int phi_vreg = phi->virtual_register();
1823 RegisterAllocationData::PhiMapValue* map_value =
1824 data()->InitializePhiMap(block, phi);
1825 InstructionOperand& output = phi->output();
1826 // Map the destination operands, so the commitment phase can find them.
1827 for (size_t i = 0; i < phi->operands().size(); ++i) {
1828 InstructionBlock* cur_block =
1829 code()->InstructionBlockAt(block->predecessors()[i]);
1830 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1831 MoveOperands* move = data()->AddGapMove(
1832 cur_block->last_instruction_index(), Instruction::END, input, output);
1833 map_value->AddOperand(&move->destination());
1834 DCHECK(!code()
1835 ->InstructionAt(cur_block->last_instruction_index())
1836 ->HasReferenceMap());
1837 }
1838 TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1839 int gap_index = block->first_instruction_index();
1840 live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1841 live_range->SetSpillStartIndex(gap_index);
1842 // We use the phi-ness of some nodes in some later heuristics.
1843 live_range->set_is_phi(true);
1844 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1845 }
1846}
1847
1848
1849LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1850 Zone* local_zone)
1851 : data_(data), phi_hints_(local_zone) {}
1852
1853
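// Returns (and caches) the live-out set of 'block': the union of the live-in
// sets of its forward successors plus the phi inputs flowing along each
// successor edge. Backward (loop) edges are skipped here and compensated for
// later by ProcessLoopHeader.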
1854BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1855 RegisterAllocationData* data) {
1856 size_t block_index = block->rpo_number().ToSize();
1857 BitVector* live_out = data->live_out_sets()[block_index];
1858 if (live_out == nullptr) {
1859    // Compute live out for the given block, not including values coming in
1860    // over backward successor edges.
1861 Zone* zone = data->allocation_zone();
1862 const InstructionSequence* code = data->code();
1863
1864 live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1865
1866 // Process all successor blocks.
1867 for (const RpoNumber& succ : block->successors()) {
1868 // Add values live on entry to the successor.
1869 if (succ <= block->rpo_number()) continue;
1870 BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1871 if (live_in != nullptr) live_out->Union(*live_in);
1872
1873 // All phi input operands corresponding to this successor edge are live
1874 // out from this block.
1875 const InstructionBlock* successor = code->InstructionBlockAt(succ);
1876 size_t index = successor->PredecessorIndexOf(block->rpo_number());
1877 DCHECK(index < successor->PredecessorCount());
1878 for (PhiInstruction* phi : successor->phis()) {
1879 live_out->Add(phi->operands()[index]);
1880 }
1881 }
1882 data->live_out_sets()[block_index] = live_out;
1883 }
1884 return live_out;
1885}
1886
1887
1888void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1889 BitVector* live_out) {
1890 // Add an interval that includes the entire block to the live range for
1891 // each live_out value.
1892 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1893 block->first_instruction_index());
1894 LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1895 block->last_instruction_index())
1896 .NextStart();
1897 BitVector::Iterator iterator(live_out);
1898 while (!iterator.Done()) {
1899 int operand_index = iterator.Current();
1900 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1901 range->AddUseInterval(start, end, allocation_zone());
1902 iterator.Advance();
1903 }
1904}
1905
1906
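// Fixed live ranges are keyed by ids outside the normal virtual register
// space; double registers are mapped below the id block used for the fixed
// general registers (see FixedLiveRangeID), so the two can never collide.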
1907int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
1908 return -index - 1 - config()->num_general_registers();
1909}
1910
1911
1912TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1913 DCHECK(index < config()->num_general_registers());
1914 TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
1915 if (result == nullptr) {
1916 result = data()->NewLiveRange(FixedLiveRangeID(index),
1917 InstructionSequence::DefaultRepresentation());
1918 DCHECK(result->IsFixed());
1919 result->set_assigned_register(index);
1920 data()->MarkAllocated(GENERAL_REGISTERS, index);
1921 data()->fixed_live_ranges()[index] = result;
1922 }
1923 return result;
1924}
1925
1926
1927TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
1928 DCHECK(index < config()->num_double_registers());
1929 TopLevelLiveRange* result = data()->fixed_double_live_ranges()[index];
1930 if (result == nullptr) {
1931 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index),
1932 MachineRepresentation::kFloat64);
1933 DCHECK(result->IsFixed());
1934 result->set_assigned_register(index);
Ben Murdochc5610432016-08-08 18:44:38 +01001935 data()->MarkAllocated(FP_REGISTERS, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001936 data()->fixed_double_live_ranges()[index] = result;
1937 }
1938 return result;
1939}
1940
1941
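// Maps an operand to the live range it belongs to: the virtual register's
// range for unallocated and constant operands, the corresponding fixed range
// for concrete (double) registers, and nullptr for anything else.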
1942TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
1943 if (operand->IsUnallocated()) {
1944 return data()->GetOrCreateLiveRangeFor(
1945 UnallocatedOperand::cast(operand)->virtual_register());
1946 } else if (operand->IsConstant()) {
1947 return data()->GetOrCreateLiveRangeFor(
1948 ConstantOperand::cast(operand)->virtual_register());
1949 } else if (operand->IsRegister()) {
1950 return FixedLiveRangeFor(
1951 LocationOperand::cast(operand)->GetRegister().code());
Ben Murdochc5610432016-08-08 18:44:38 +01001952 } else if (operand->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001953 return FixedDoubleLiveRangeFor(
1954 LocationOperand::cast(operand)->GetDoubleRegister().code());
1955 } else {
1956 return nullptr;
1957 }
1958}
1959
1960
1961UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
1962 InstructionOperand* operand,
1963 void* hint,
1964 UsePositionHintType hint_type) {
1965 return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
1966}
1967
1968
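// Records a definition of 'operand' at 'position'. A definition without a
// later use opens a minimal one-position interval; otherwise the interval
// built so far is shortened to start at the definition. Returns the new use
// position for unallocated operands, nullptr otherwise.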
1969UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
1970 InstructionOperand* operand, void* hint,
1971 UsePositionHintType hint_type) {
1972 TopLevelLiveRange* range = LiveRangeFor(operand);
1973 if (range == nullptr) return nullptr;
1974
1975 if (range->IsEmpty() || range->Start() > position) {
1976 // Can happen if there is a definition without use.
1977 range->AddUseInterval(position, position.NextStart(), allocation_zone());
1978 range->AddUsePosition(NewUsePosition(position.NextStart()));
1979 } else {
1980 range->ShortenTo(position);
1981 }
1982 if (!operand->IsUnallocated()) return nullptr;
1983 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
1984 UsePosition* use_pos =
1985 NewUsePosition(position, unalloc_operand, hint, hint_type);
1986 range->AddUsePosition(use_pos);
1987 return use_pos;
1988}
1989
1990
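// Records a use of 'operand' at 'position'. Because liveness is built walking
// the instructions backwards, the covering interval is extended back to
// 'block_start'. Returns the new use position for unallocated operands,
// nullptr otherwise.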
1991UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
1992 LifetimePosition position,
1993 InstructionOperand* operand, void* hint,
1994 UsePositionHintType hint_type) {
1995 TopLevelLiveRange* range = LiveRangeFor(operand);
1996 if (range == nullptr) return nullptr;
1997 UsePosition* use_pos = nullptr;
1998 if (operand->IsUnallocated()) {
1999 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2000 use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2001 range->AddUsePosition(use_pos);
2002 }
2003 range->AddUseInterval(block_start, position, allocation_zone());
2004 return use_pos;
2005}
2006
2007
2008void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2009 BitVector* live) {
2010 int block_start = block->first_instruction_index();
2011 LifetimePosition block_start_position =
2012 LifetimePosition::GapFromInstructionIndex(block_start);
2013
2014 for (int index = block->last_instruction_index(); index >= block_start;
2015 index--) {
2016 LifetimePosition curr_position =
2017 LifetimePosition::InstructionFromInstructionIndex(index);
2018 Instruction* instr = code()->InstructionAt(index);
2019 DCHECK(instr != nullptr);
2020 DCHECK(curr_position.IsInstructionPosition());
2021 // Process output, inputs, and temps of this instruction.
2022 for (size_t i = 0; i < instr->OutputCount(); i++) {
2023 InstructionOperand* output = instr->OutputAt(i);
2024 if (output->IsUnallocated()) {
2025 // Unsupported.
2026 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2027 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2028 live->Remove(out_vreg);
2029 } else if (output->IsConstant()) {
2030 int out_vreg = ConstantOperand::cast(output)->virtual_register();
2031 live->Remove(out_vreg);
2032 }
2033 if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2034 output->IsRegister() &&
2035 AllocatedOperand::cast(output)->GetRegister().is(
2036 v8::internal::kReturnRegister0)) {
2037 // The register defined here is blocked from gap start - it is the
2038 // exception value.
2039 // TODO(mtrofin): should we explore an explicit opcode for
2040 // the first instruction in the handler?
2041 Define(LifetimePosition::GapFromInstructionIndex(index), output);
2042 } else {
2043 Define(curr_position, output);
2044 }
2045 }
2046
2047 if (instr->ClobbersRegisters()) {
2048 for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2049 int code = config()->GetAllocatableGeneralCode(i);
2050 if (!IsOutputRegisterOf(instr, Register::from_code(code))) {
2051 TopLevelLiveRange* range = FixedLiveRangeFor(code);
2052 range->AddUseInterval(curr_position, curr_position.End(),
2053 allocation_zone());
2054 }
2055 }
2056 }
2057
2058 if (instr->ClobbersDoubleRegisters()) {
2059 for (int i = 0; i < config()->num_allocatable_aliased_double_registers();
2060 ++i) {
2061 int code = config()->GetAllocatableDoubleCode(i);
2062 if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) {
2063 TopLevelLiveRange* range = FixedDoubleLiveRangeFor(code);
2064 range->AddUseInterval(curr_position, curr_position.End(),
2065 allocation_zone());
2066 }
2067 }
2068 }
2069
2070 for (size_t i = 0; i < instr->InputCount(); i++) {
2071 InstructionOperand* input = instr->InputAt(i);
2072 if (input->IsImmediate() || input->IsExplicit()) {
2073 continue; // Ignore immediates and explicitly reserved registers.
2074 }
2075 LifetimePosition use_pos;
2076 if (input->IsUnallocated() &&
2077 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2078 use_pos = curr_position;
2079 } else {
2080 use_pos = curr_position.End();
2081 }
2082
2083 if (input->IsUnallocated()) {
2084 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2085 int vreg = unalloc->virtual_register();
2086 live->Add(vreg);
2087 if (unalloc->HasSlotPolicy()) {
2088 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2089 }
2090 }
2091 Use(block_start_position, use_pos, input);
2092 }
2093
2094 for (size_t i = 0; i < instr->TempCount(); i++) {
2095 InstructionOperand* temp = instr->TempAt(i);
2096 // Unsupported.
2097 DCHECK_IMPLIES(temp->IsUnallocated(),
2098 !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2099 if (instr->ClobbersTemps()) {
2100 if (temp->IsRegister()) continue;
2101 if (temp->IsUnallocated()) {
2102 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2103 if (temp_unalloc->HasFixedPolicy()) {
2104 continue;
2105 }
2106 }
2107 }
2108 Use(block_start_position, curr_position.End(), temp);
2109 Define(curr_position, temp);
2110 }
2111
2112 // Process the moves of the instruction's gaps, making their sources live.
2113 const Instruction::GapPosition kPositions[] = {Instruction::END,
2114 Instruction::START};
2115 curr_position = curr_position.PrevStart();
2116 DCHECK(curr_position.IsGapPosition());
2117 for (const Instruction::GapPosition& position : kPositions) {
2118 ParallelMove* move = instr->GetParallelMove(position);
2119 if (move == nullptr) continue;
2120 if (position == Instruction::END) {
2121 curr_position = curr_position.End();
2122 } else {
2123 curr_position = curr_position.Start();
2124 }
2125 for (MoveOperands* cur : *move) {
2126 InstructionOperand& from = cur->source();
2127 InstructionOperand& to = cur->destination();
2128 void* hint = &to;
2129 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2130 UsePosition* to_use = nullptr;
2131 int phi_vreg = -1;
2132 if (to.IsUnallocated()) {
2133 int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2134 TopLevelLiveRange* to_range =
2135 data()->GetOrCreateLiveRangeFor(to_vreg);
2136 if (to_range->is_phi()) {
2137 phi_vreg = to_vreg;
2138 if (to_range->is_non_loop_phi()) {
2139 hint = to_range->current_hint_position();
2140 hint_type = hint == nullptr ? UsePositionHintType::kNone
2141 : UsePositionHintType::kUsePos;
2142 } else {
2143 hint_type = UsePositionHintType::kPhi;
2144 hint = data()->GetPhiMapValueFor(to_vreg);
2145 }
2146 } else {
2147 if (live->Contains(to_vreg)) {
2148 to_use = Define(curr_position, &to, &from,
2149 UsePosition::HintTypeForOperand(from));
2150 live->Remove(to_vreg);
2151 } else {
2152 cur->Eliminate();
2153 continue;
2154 }
2155 }
2156 } else {
2157 Define(curr_position, &to);
2158 }
2159 UsePosition* from_use =
2160 Use(block_start_position, curr_position, &from, hint, hint_type);
2161 // Mark range live.
2162 if (from.IsUnallocated()) {
2163 live->Add(UnallocatedOperand::cast(from).virtual_register());
2164 }
2165 // Resolve use position hints just created.
2166 if (to_use != nullptr && from_use != nullptr) {
2167 to_use->ResolveHint(from_use);
2168 from_use->ResolveHint(to_use);
2169 }
2170 DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2171 DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2172 // Potentially resolve phi hint.
2173 if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2174 }
2175 }
2176 }
2177}
2178
2179
2180void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2181 BitVector* live) {
2182 for (PhiInstruction* phi : block->phis()) {
2183 // The live range interval already ends at the first instruction of the
2184 // block.
2185 int phi_vreg = phi->virtual_register();
2186 live->Remove(phi_vreg);
2187 InstructionOperand* hint = nullptr;
Ben Murdochda12d292016-06-02 14:46:10 +01002188 const InstructionBlock::Predecessors& predecessors = block->predecessors();
2189 const InstructionBlock* predecessor_block =
2190 code()->InstructionBlockAt(predecessors[0]);
2191 const Instruction* instr = GetLastInstruction(code(), predecessor_block);
2192 if (predecessor_block->IsDeferred()) {
2193      // Prefer the hint from the first non-deferred predecessor, if any.
2194 for (size_t i = 1; i < predecessors.size(); ++i) {
2195 predecessor_block = code()->InstructionBlockAt(predecessors[i]);
2196 if (!predecessor_block->IsDeferred()) {
2197 instr = GetLastInstruction(code(), predecessor_block);
2198 break;
2199 }
2200 }
2201 }
2202 DCHECK_NOT_NULL(instr);
2203
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002204 for (MoveOperands* move : *instr->GetParallelMove(Instruction::END)) {
2205 InstructionOperand& to = move->destination();
2206 if (to.IsUnallocated() &&
2207 UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2208 hint = &move->source();
2209 break;
2210 }
2211 }
2212 DCHECK(hint != nullptr);
2213 LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2214 block->first_instruction_index());
2215 UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2216 UsePosition::HintTypeForOperand(*hint));
2217 MapPhiHint(hint, use_pos);
2218 }
2219}
2220
2221
2222void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2223 BitVector* live) {
2224 DCHECK(block->IsLoopHeader());
2225 // Add a live range stretching from the first loop instruction to the last
2226 // for each value live on entry to the header.
2227 BitVector::Iterator iterator(live);
2228 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2229 block->first_instruction_index());
2230 LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2231 code()->LastLoopInstructionIndex(block))
2232 .NextFullStart();
2233 while (!iterator.Done()) {
2234 int operand_index = iterator.Current();
2235 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2236 range->EnsureInterval(start, end, allocation_zone());
2237 iterator.Advance();
2238 }
2239 // Insert all values into the live in sets of all blocks in the loop.
2240 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2241 ++i) {
2242 live_in_sets()[i]->Union(*live);
2243 }
2244}
2245
2246
2247void LiveRangeBuilder::BuildLiveRanges() {
2248 // Process the blocks in reverse order.
2249 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2250 --block_id) {
2251 InstructionBlock* block =
2252 code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2253 BitVector* live = ComputeLiveOut(block, data());
2254 // Initially consider all live_out values live for the entire block. We
2255 // will shorten these intervals if necessary.
2256 AddInitialIntervals(block, live);
2257 // Process the instructions in reverse order, generating and killing
2258 // live values.
2259 ProcessInstructions(block, live);
2260 // All phi output operands are killed by this block.
2261 ProcessPhis(block, live);
2262    // Now live is the live_in set for this block, except that it does not
2263    // include values live out on backward successor edges.
2264 if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2265 live_in_sets()[block_id] = live;
2266 }
2267 // Postprocess the ranges.
2268 for (TopLevelLiveRange* range : data()->live_ranges()) {
2269 if (range == nullptr) continue;
2270    // Give slots to all ranges with a non-fixed slot use.
2271 if (range->has_slot_use() && range->HasNoSpillType()) {
2272 data()->AssignSpillRangeToLiveRange(range);
2273 }
2274 // TODO(bmeurer): This is a horrible hack to make sure that for constant
2275 // live ranges, every use requires the constant to be in a register.
2276 // Without this hack, all uses with "any" policy would get the constant
2277 // operand assigned.
2278 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2279 for (UsePosition* pos = range->first_pos(); pos != nullptr;
2280 pos = pos->next()) {
2281 if (pos->type() == UsePositionType::kRequiresSlot) continue;
2282 UsePositionType new_type = UsePositionType::kAny;
2283 // Can't mark phis as needing a register.
2284 if (!pos->pos().IsGapPosition()) {
2285 new_type = UsePositionType::kRequiresRegister;
2286 }
2287 pos->set_type(new_type, true);
2288 }
2289 }
2290 }
2291 for (auto preassigned : data()->preassigned_slot_ranges()) {
2292 TopLevelLiveRange* range = preassigned.first;
2293 int slot_id = preassigned.second;
2294 SpillRange* spill = range->HasSpillRange()
2295 ? range->GetSpillRange()
2296 : data()->AssignSpillRangeToLiveRange(range);
2297 spill->set_assigned_slot(slot_id);
2298 }
2299#ifdef DEBUG
2300 Verify();
2301#endif
2302}
2303
2304
2305void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2306 UsePosition* use_pos) {
2307 DCHECK(!use_pos->IsResolved());
2308 auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2309 DCHECK(res.second);
2310 USE(res);
2311}
2312
2313
2314void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2315 UsePosition* use_pos) {
2316 auto it = phi_hints_.find(operand);
2317 if (it == phi_hints_.end()) return;
2318 DCHECK(!it->second->IsResolved());
2319 it->second->ResolveHint(use_pos);
2320}
2321
2322
2323void LiveRangeBuilder::Verify() const {
2324 for (auto& hint : phi_hints_) {
2325 CHECK(hint.second->IsResolved());
2326 }
Ben Murdochda12d292016-06-02 14:46:10 +01002327 for (const TopLevelLiveRange* current : data()->live_ranges()) {
2328 if (current != nullptr && !current->IsEmpty()) {
2329 // New LiveRanges should not be split.
2330 CHECK_NULL(current->next());
2331 // General integrity check.
2332 current->Verify();
2333 const UseInterval* first = current->first_interval();
2334 if (first->next() == nullptr) continue;
2335
2336 // Consecutive intervals should not end and start in the same block,
2337 // otherwise the intervals should have been joined, because the
2338 // variable is live throughout that block.
2339 CHECK(NextIntervalStartsInDifferentBlocks(first));
2340
2341 for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
2342        // Except for the first interval, the other intervals must start at
2343        // a block boundary; otherwise data wouldn't flow to them.
2344 CHECK(IntervalStartsAtBlockBoundary(i));
2345 // The last instruction of the predecessors of the block the interval
2346 // starts must be covered by the range.
2347 CHECK(IntervalPredecessorsCoveredByRange(i, current));
2348 if (i->next() != nullptr) {
2349 // Check the consecutive intervals property, except for the last
2350 // interval, where it doesn't apply.
2351 CHECK(NextIntervalStartsInDifferentBlocks(i));
2352 }
2353 }
2354 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002355 }
2356}
2357
Ben Murdochda12d292016-06-02 14:46:10 +01002358bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
2359 const UseInterval* interval) const {
2360 LifetimePosition start = interval->start();
2361 if (!start.IsFullStart()) return false;
2362 int instruction_index = start.ToInstructionIndex();
2363 const InstructionBlock* block =
2364 data()->code()->GetInstructionBlock(instruction_index);
2365 return block->first_instruction_index() == instruction_index;
2366}
2367
2368bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
2369 const UseInterval* interval, const TopLevelLiveRange* range) const {
2370 LifetimePosition start = interval->start();
2371 int instruction_index = start.ToInstructionIndex();
2372 const InstructionBlock* block =
2373 data()->code()->GetInstructionBlock(instruction_index);
2374 for (RpoNumber pred_index : block->predecessors()) {
2375 const InstructionBlock* predecessor =
2376 data()->code()->InstructionBlockAt(pred_index);
2377 LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
2378 predecessor->last_instruction_index());
2379 last_pos = last_pos.NextStart().End();
2380 if (!range->Covers(last_pos)) return false;
2381 }
2382 return true;
2383}
2384
2385bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
2386 const UseInterval* interval) const {
2387 DCHECK_NOT_NULL(interval->next());
2388 LifetimePosition end = interval->end();
2389 LifetimePosition next_start = interval->next()->start();
2390  // Since end is not covered, but the previous position is, move back a
2391  // position.
2392 end = end.IsStart() ? end.PrevStart().End() : end.Start();
2393 int last_covered_index = end.ToInstructionIndex();
2394 const InstructionBlock* block =
2395 data()->code()->GetInstructionBlock(last_covered_index);
2396 const InstructionBlock* next_block =
2397 data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
2398 return block->rpo_number() < next_block->rpo_number();
2399}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002400
2401RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2402 RegisterKind kind)
2403 : data_(data),
2404 mode_(kind),
2405 num_registers_(GetRegisterCount(data->config(), kind)),
2406 num_allocatable_registers_(
2407 GetAllocatableRegisterCount(data->config(), kind)),
2408 allocatable_register_codes_(
2409 GetAllocatableRegisterCodes(data->config(), kind)) {}
2410
2411
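// Returns the gap position of 'instruction_index' if it lies strictly inside
// 'range', or an invalid position if there is no room to split there.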
2412LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2413 const LiveRange* range, int instruction_index) {
2414 LifetimePosition ret = LifetimePosition::Invalid();
2415
2416 ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2417 if (range->Start() >= ret || ret >= range->End()) {
2418 return LifetimePosition::Invalid();
2419 }
2420 return ret;
2421}
2422
2423
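// Ranges that already carry a spill representation (only spill operands when
// 'operands_only' is set) do not need a register at their definition: spill
// them outright if no use benefits from a register, otherwise split off and
// spill just the head of the range up to that first beneficial use.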
2424void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand(
2425 bool operands_only) {
2426 size_t initial_range_count = data()->live_ranges().size();
2427 for (size_t i = 0; i < initial_range_count; ++i) {
2428 TopLevelLiveRange* range = data()->live_ranges()[i];
2429 if (!CanProcessRange(range)) continue;
2430 if (range->HasNoSpillType() || (operands_only && range->HasSpillRange())) {
2431 continue;
2432 }
2433
2434 LifetimePosition start = range->Start();
2435 TRACE("Live range %d:%d is defined by a spill operand.\n",
2436 range->TopLevel()->vreg(), range->relative_id());
2437 LifetimePosition next_pos = start;
2438 if (next_pos.IsGapPosition()) {
2439 next_pos = next_pos.NextStart();
2440 }
2441 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2442 // If the range already has a spill operand and it doesn't need a
2443 // register immediately, split it and spill the first part of the range.
2444 if (pos == nullptr) {
2445 Spill(range);
2446 } else if (pos->pos() > range->Start().NextStart()) {
2447      // Do not spill the live range eagerly if the use position that could
2448      // benefit from the register is too close to the start of the range.
2449 LifetimePosition split_pos = GetSplitPositionForInstruction(
2450 range, pos->pos().ToInstructionIndex());
2451 // There is no place to split, so we can't split and spill.
2452 if (!split_pos.IsValid()) continue;
2453
2454 split_pos =
2455 FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2456
2457 SplitRangeAt(range, split_pos);
2458 Spill(range);
2459 }
2460 }
2461}
2462
2463
2464LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2465 LifetimePosition pos) {
2466 DCHECK(!range->TopLevel()->IsFixed());
2467 TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2468 range->relative_id(), pos.value());
2469
2470 if (pos <= range->Start()) return range;
2471
2472  // We can't properly connect live ranges if splitting occurred at the end
2473  // of a block.
2474 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2475 (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2476 pos.ToInstructionIndex()));
2477
2478 LiveRange* result = range->SplitAt(pos, allocation_zone());
2479 return result;
2480}
2481
2482
2483LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2484 LifetimePosition start,
2485 LifetimePosition end) {
2486 DCHECK(!range->TopLevel()->IsFixed());
2487 TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2488 range->TopLevel()->vreg(), range->relative_id(), start.value(),
2489 end.value());
2490
2491 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2492 DCHECK(split_pos >= start);
2493 return SplitRangeAt(range, split_pos);
2494}
2495
2496
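// Picks a split position in [start, end]. Within a single block the latest
// position is used; across blocks the split is hoisted to the start of the
// outermost loop that begins after 'start', so the split lands outside loops
// whenever possible.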
2497LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2498 LifetimePosition end) {
2499 int start_instr = start.ToInstructionIndex();
2500 int end_instr = end.ToInstructionIndex();
2501 DCHECK(start_instr <= end_instr);
2502
2503  // We have no choice: start and end are within the same instruction.
2504 if (start_instr == end_instr) return end;
2505
2506 const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2507 const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2508
2509 if (end_block == start_block) {
2510 // The interval is split in the same basic block. Split at the latest
2511 // possible position.
2512 return end;
2513 }
2514
2515 const InstructionBlock* block = end_block;
2516 // Find header of outermost loop.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002517 do {
2518 const InstructionBlock* loop = GetContainingLoop(code(), block);
2519 if (loop == nullptr ||
2520 loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2521 // No more loops or loop starts before the lifetime start.
2522 break;
2523 }
2524 block = loop;
2525 } while (true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002526
2527 // We did not find any suitable outer loop. Split at the latest possible
2528 // position unless end_block is a loop header itself.
2529 if (block == end_block && !end_block->IsLoopHeader()) return end;
2530
2531 return LifetimePosition::GapFromInstructionIndex(
2532 block->first_instruction_index());
2533}
2534
2535
2536LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2537 LiveRange* range, LifetimePosition pos) {
2538 const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2539 const InstructionBlock* loop_header =
2540 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2541
2542 if (loop_header == nullptr) return pos;
2543
2544 const UsePosition* prev_use =
2545 range->PreviousUsePositionRegisterIsBeneficial(pos);
2546
2547 while (loop_header != nullptr) {
2548    // We are going to spill the live range inside the loop.
2549    // If possible, try to move the spilling position backwards to the loop
2550    // header. This will reduce the number of memory moves on the back edge.
2551 LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2552 loop_header->first_instruction_index());
2553
2554 if (range->Covers(loop_start)) {
2555 if (prev_use == nullptr || prev_use->pos() < loop_start) {
2556 // No register beneficial use inside the loop before the pos.
2557 pos = loop_start;
2558 }
2559 }
2560
2561 // Try hoisting out to an outer loop.
2562 loop_header = GetContainingLoop(code(), loop_header);
2563 }
2564
2565 return pos;
2566}
2567
2568
2569void RegisterAllocator::Spill(LiveRange* range) {
2570 DCHECK(!range->spilled());
2571 TopLevelLiveRange* first = range->TopLevel();
2572 TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2573
2574 if (first->HasNoSpillType()) {
2575 data()->AssignSpillRangeToLiveRange(first);
2576 }
2577 range->Spill();
2578}
2579
2580
2581const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters()
2582 const {
Ben Murdochc5610432016-08-08 18:44:38 +01002583 return mode() == FP_REGISTERS ? data()->fixed_double_live_ranges()
2584 : data()->fixed_live_ranges();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002585}
2586
2587
2588const char* RegisterAllocator::RegisterName(int register_code) const {
2589 if (mode() == GENERAL_REGISTERS) {
2590 return data()->config()->GetGeneralRegisterName(register_code);
2591 } else {
2592 return data()->config()->GetDoubleRegisterName(register_code);
2593 }
2594}
2595
2596
2597LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2598 RegisterKind kind, Zone* local_zone)
2599 : RegisterAllocator(data, kind),
2600 unhandled_live_ranges_(local_zone),
2601 active_live_ranges_(local_zone),
2602 inactive_live_ranges_(local_zone) {
2603 unhandled_live_ranges().reserve(
2604 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
2605 active_live_ranges().reserve(8);
2606 inactive_live_ranges().reserve(8);
2607 // TryAllocateFreeReg and AllocateBlockedReg assume this
2608 // when allocating local arrays.
Ben Murdochc5610432016-08-08 18:44:38 +01002609 DCHECK(RegisterConfiguration::kMaxFPRegisters >=
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610 this->data()->config()->num_general_registers());
2611}
2612
2613
2614void LinearScanAllocator::AllocateRegisters() {
2615 DCHECK(unhandled_live_ranges().empty());
2616 DCHECK(active_live_ranges().empty());
2617 DCHECK(inactive_live_ranges().empty());
2618
2619 SplitAndSpillRangesDefinedByMemoryOperand(code()->VirtualRegisterCount() <=
2620 num_allocatable_registers());
2621
2622 for (TopLevelLiveRange* range : data()->live_ranges()) {
2623 if (!CanProcessRange(range)) continue;
2624 for (LiveRange* to_add = range; to_add != nullptr;
2625 to_add = to_add->next()) {
2626 if (!to_add->spilled()) {
2627 AddToUnhandledUnsorted(to_add);
2628 }
2629 }
2630 }
2631 SortUnhandled();
2632 DCHECK(UnhandledIsSorted());
2633
2634 auto& fixed_ranges = GetFixedRegisters();
2635 for (TopLevelLiveRange* current : fixed_ranges) {
2636 if (current != nullptr) {
2637 DCHECK_EQ(mode(), current->kind());
2638 AddToInactive(current);
2639 }
2640 }
2641
2642 while (!unhandled_live_ranges().empty()) {
2643 DCHECK(UnhandledIsSorted());
2644 LiveRange* current = unhandled_live_ranges().back();
2645 unhandled_live_ranges().pop_back();
2646 DCHECK(UnhandledIsSorted());
2647 LifetimePosition position = current->Start();
2648#ifdef DEBUG
2649 allocation_finger_ = position;
2650#endif
2651 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2652 current->relative_id(), position.value());
2653
2654 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2655 continue;
2656
2657 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2658 LiveRange* cur_active = active_live_ranges()[i];
2659 if (cur_active->End() <= position) {
2660 ActiveToHandled(cur_active);
2661 --i; // The live range was removed from the list of active live ranges.
2662 } else if (!cur_active->Covers(position)) {
2663 ActiveToInactive(cur_active);
2664 --i; // The live range was removed from the list of active live ranges.
2665 }
2666 }
2667
2668 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2669 LiveRange* cur_inactive = inactive_live_ranges()[i];
2670 if (cur_inactive->End() <= position) {
2671 InactiveToHandled(cur_inactive);
2672 --i; // Live range was removed from the list of inactive live ranges.
2673 } else if (cur_inactive->Covers(position)) {
2674 InactiveToActive(cur_inactive);
2675 --i; // Live range was removed from the list of inactive live ranges.
2676 }
2677 }
2678
2679 DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2680
2681 bool result = TryAllocateFreeReg(current);
2682 if (!result) AllocateBlockedReg(current);
2683 if (current->HasRegisterAssigned()) {
2684 AddToActive(current);
2685 }
2686 }
2687}
2688
2689
2690void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2691 int reg) {
2692 data()->MarkAllocated(range->kind(), reg);
2693 range->set_assigned_register(reg);
2694 range->SetUseHints(reg);
2695 if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
2696 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
2697 }
2698}
2699
2700
2701void LinearScanAllocator::AddToActive(LiveRange* range) {
2702 TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
2703 range->relative_id());
2704 active_live_ranges().push_back(range);
2705}
2706
2707
2708void LinearScanAllocator::AddToInactive(LiveRange* range) {
2709 TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
2710 range->relative_id());
2711 inactive_live_ranges().push_back(range);
2712}
2713
2714
2715void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2716 if (range == nullptr || range->IsEmpty()) return;
2717 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2718 DCHECK(allocation_finger_ <= range->Start());
2719 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2720 --i) {
2721 LiveRange* cur_range = unhandled_live_ranges().at(i);
2722 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2723 TRACE("Add live range %d:%d to unhandled at %d\n",
2724 range->TopLevel()->vreg(), range->relative_id(), i + 1);
2725 auto it = unhandled_live_ranges().begin() + (i + 1);
2726 unhandled_live_ranges().insert(it, range);
2727 DCHECK(UnhandledIsSorted());
2728 return;
2729 }
2730 TRACE("Add live range %d:%d to unhandled at start\n",
2731 range->TopLevel()->vreg(), range->relative_id());
2732 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2733 DCHECK(UnhandledIsSorted());
2734}
2735
2736
2737void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2738 if (range == nullptr || range->IsEmpty()) return;
2739 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2740 TRACE("Add live range %d:%d to unhandled unsorted at end\n",
2741 range->TopLevel()->vreg(), range->relative_id());
2742 unhandled_live_ranges().push_back(range);
2743}
2744
2745
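// Comparator used by SortUnhandled below: ranges that should be allocated
// first are ordered towards the end of the vector (it is consumed from the
// back), with ties broken by virtual register number for determinism.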
2746static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2747 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2748 if (a->ShouldBeAllocatedBefore(b)) return false;
2749 if (b->ShouldBeAllocatedBefore(a)) return true;
2750 return a->TopLevel()->vreg() < b->TopLevel()->vreg();
2751}
2752
2753
2754// Sort the unhandled live ranges so that the ranges to be processed first are
2755// at the end of the vector. This is convenient for the register allocation
2756// algorithm because it is efficient to remove elements from the end.
2757void LinearScanAllocator::SortUnhandled() {
2758 TRACE("Sort unhandled\n");
2759 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2760 &UnhandledSortHelper);
2761}
2762
2763
2764bool LinearScanAllocator::UnhandledIsSorted() {
2765 size_t len = unhandled_live_ranges().size();
2766 for (size_t i = 1; i < len; i++) {
2767 LiveRange* a = unhandled_live_ranges().at(i - 1);
2768 LiveRange* b = unhandled_live_ranges().at(i);
2769 if (a->Start() < b->Start()) return false;
2770 }
2771 return true;
2772}
2773
2774
2775void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2776 RemoveElement(&active_live_ranges(), range);
2777 TRACE("Moving live range %d:%d from active to handled\n",
2778 range->TopLevel()->vreg(), range->relative_id());
2779}
2780
2781
2782void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2783 RemoveElement(&active_live_ranges(), range);
2784 inactive_live_ranges().push_back(range);
2785 TRACE("Moving live range %d:%d from active to inactive\n",
2786 range->TopLevel()->vreg(), range->relative_id());
2787}
2788
2789
2790void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2791 RemoveElement(&inactive_live_ranges(), range);
2792 TRACE("Moving live range %d:%d from inactive to handled\n",
2793 range->TopLevel()->vreg(), range->relative_id());
2794}
2795
2796
2797void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2798 RemoveElement(&inactive_live_ranges(), range);
2799 active_live_ranges().push_back(range);
2800 TRACE("Moving live range %d:%d from inactive to active\n",
2801 range->TopLevel()->vreg(), range->relative_id());
2802}
2803
2804
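// Classic linear-scan step: compute, per register, the position up to which
// it stays free given the active and intersecting inactive ranges. Prefer the
// hinted register if it is free past the end of 'current'; otherwise take the
// register that stays free the longest, splitting 'current' and requeueing
// the tail if that register becomes blocked before the range ends. Returns
// false only if every register is already blocked at the range start.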
2805bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
Ben Murdochc5610432016-08-08 18:44:38 +01002806 LifetimePosition free_until_pos[RegisterConfiguration::kMaxFPRegisters];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002807
2808 for (int i = 0; i < num_registers(); i++) {
2809 free_until_pos[i] = LifetimePosition::MaxPosition();
2810 }
2811
2812 for (LiveRange* cur_active : active_live_ranges()) {
2813 free_until_pos[cur_active->assigned_register()] =
2814 LifetimePosition::GapFromInstructionIndex(0);
2815 TRACE("Register %s is free until pos %d (1)\n",
2816 RegisterName(cur_active->assigned_register()),
2817 LifetimePosition::GapFromInstructionIndex(0).value());
2818 }
2819
2820 for (LiveRange* cur_inactive : inactive_live_ranges()) {
2821 DCHECK(cur_inactive->End() > current->Start());
2822 LifetimePosition next_intersection =
2823 cur_inactive->FirstIntersection(current);
2824 if (!next_intersection.IsValid()) continue;
2825 int cur_reg = cur_inactive->assigned_register();
2826 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
2827 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
2828 Min(free_until_pos[cur_reg], next_intersection).value());
2829 }
2830
2831 int hint_register;
2832 if (current->FirstHintPosition(&hint_register) != nullptr) {
2833 TRACE(
2834 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
2835 RegisterName(hint_register), free_until_pos[hint_register].value(),
2836 current->TopLevel()->vreg(), current->relative_id(),
2837 current->End().value());
2838
2839 // The desired register is free until the end of the current live range.
2840 if (free_until_pos[hint_register] >= current->End()) {
2841 TRACE("Assigning preferred reg %s to live range %d:%d\n",
2842 RegisterName(hint_register), current->TopLevel()->vreg(),
2843 current->relative_id());
2844 SetLiveRangeAssignedRegister(current, hint_register);
2845 return true;
2846 }
2847 }
2848
2849 // Find the register which stays free for the longest time.
2850 int reg = allocatable_register_code(0);
2851 for (int i = 1; i < num_allocatable_registers(); ++i) {
2852 int code = allocatable_register_code(i);
2853 if (free_until_pos[code] > free_until_pos[reg]) {
2854 reg = code;
2855 }
2856 }
2857
2858 LifetimePosition pos = free_until_pos[reg];
2859
2860 if (pos <= current->Start()) {
2861 // All registers are blocked.
2862 return false;
2863 }
2864
2865 if (pos < current->End()) {
2866    // Register reg is available at the range start but becomes blocked before
2867    // the range end. Split current at the position where it becomes blocked.
2868 LiveRange* tail = SplitRangeAt(current, pos);
2869 AddToUnhandledSorted(tail);
2870 }
2871
2872 // Register reg is available at the range start and is free until
2873 // the range end.
2874 DCHECK(pos >= current->End());
2875 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
2876 current->TopLevel()->vreg(), current->relative_id());
2877 SetLiveRangeAssignedRegister(current, reg);
2878
2879 return true;
2880}
2881
2882
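// Called when no register is free for all of 'current'. Computes, per
// register, the next position at which it is needed again (use_pos) and at
// which it is hard-blocked by a fixed range (block_pos), picks the register
// whose next use is furthest away, and then either spills 'current' up to its
// first register-demanding use or assigns the register and splits/spills the
// intersecting active and inactive ranges.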
2883void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2884 UsePosition* register_use = current->NextRegisterPosition(current->Start());
2885 if (register_use == nullptr) {
2886 // There is no use in the current live range that requires a register.
2887 // We can just spill it.
2888 Spill(current);
2889 return;
2890 }
2891
Ben Murdochc5610432016-08-08 18:44:38 +01002892 LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
2893 LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002894
2895 for (int i = 0; i < num_registers(); i++) {
2896 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
2897 }
2898
2899 for (LiveRange* range : active_live_ranges()) {
2900 int cur_reg = range->assigned_register();
2901 if (range->TopLevel()->IsFixed() ||
2902 !range->CanBeSpilled(current->Start())) {
2903 block_pos[cur_reg] = use_pos[cur_reg] =
2904 LifetimePosition::GapFromInstructionIndex(0);
2905 } else {
2906 UsePosition* next_use =
2907 range->NextUsePositionRegisterIsBeneficial(current->Start());
2908 if (next_use == nullptr) {
2909 use_pos[cur_reg] = range->End();
2910 } else {
2911 use_pos[cur_reg] = next_use->pos();
2912 }
2913 }
2914 }
2915
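  // Inactive ranges only constrain a register where they intersect current:
  // fixed inactive ranges block it at the intersection, the others merely
  // bound the next use.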
2916 for (LiveRange* range : inactive_live_ranges()) {
2917 DCHECK(range->End() > current->Start());
2918 LifetimePosition next_intersection = range->FirstIntersection(current);
2919 if (!next_intersection.IsValid()) continue;
2920 int cur_reg = range->assigned_register();
2921 if (range->TopLevel()->IsFixed()) {
2922 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
2923 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
2924 } else {
2925 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
2926 }
2927 }
2928
2929 int reg = allocatable_register_code(0);
2930 for (int i = 1; i < num_allocatable_registers(); ++i) {
2931 int code = allocatable_register_code(i);
2932 if (use_pos[code] > use_pos[reg]) {
2933 reg = code;
2934 }
2935 }
2936
2937 LifetimePosition pos = use_pos[reg];
2938
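  // Even the best candidate cannot be held until current's first use that
  // requires a register: spill current before that use if a gap position
  // exists in between, otherwise take the register and evict the conflicting
  // ranges.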
2939 if (pos < register_use->pos()) {
Ben Murdochc5610432016-08-08 18:44:38 +01002940 if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
2941 register_use->pos())) {
2942 SpillBetween(current, current->Start(), register_use->pos());
2943 } else {
2944 SetLiveRangeAssignedRegister(current, reg);
2945 SplitAndSpillIntersecting(current);
2946 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002947 return;
2948 }
2949
2950 if (block_pos[reg] < current->End()) {
2951 // Register becomes blocked before the current range end. Split before that
2952 // position.
2953 LiveRange* tail =
2954 SplitBetween(current, current->Start(), block_pos[reg].Start());
2955 AddToUnhandledSorted(tail);
2956 }
2957
2958 // Register reg is not blocked for the whole range.
2959 DCHECK(block_pos[reg] >= current->End());
2960 TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
2961 current->TopLevel()->vreg(), current->relative_id());
2962 SetLiveRangeAssignedRegister(current, reg);
2963
2964 // This register was not free. Thus we need to find and spill
2965 // parts of active and inactive live regions that use the same register
2966 // at the same lifetime positions as current.
2967 SplitAndSpillIntersecting(current);
2968}
2969
2970
2971void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2972 DCHECK(current->HasRegisterAssigned());
2973 int reg = current->assigned_register();
2974 LifetimePosition split_pos = current->Start();
2975 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2976 LiveRange* range = active_live_ranges()[i];
2977 if (range->assigned_register() == reg) {
2978 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2979 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
2980 if (next_pos == nullptr) {
2981 SpillAfter(range, spill_pos);
2982 } else {
2983 // When spilling between spill_pos and next_pos ensure that the range
2984 // remains spilled at least until the start of the current live range.
2985 // This guarantees that we will not introduce new unhandled ranges that
 2986        // start before the current range, as this would violate the allocation
 2987        // invariant and lead to an inconsistent state of active and inactive
 2988        // live-ranges: ranges are allocated in order of their start positions,
 2989        // and ranges are retired from active/inactive when the start of the
2990 // current live-range is larger than their end.
Ben Murdochc5610432016-08-08 18:44:38 +01002991 DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
2992 next_pos->pos()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002993 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2994 }
2995 ActiveToHandled(range);
2996 --i;
2997 }
2998 }
2999
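  // Inactive ranges assigned to reg that intersect current must also be split
  // and spilled around the intersection; fixed ranges are left untouched.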
3000 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
3001 LiveRange* range = inactive_live_ranges()[i];
3002 DCHECK(range->End() > current->Start());
3003 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
3004 LifetimePosition next_intersection = range->FirstIntersection(current);
3005 if (next_intersection.IsValid()) {
3006 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3007 if (next_pos == nullptr) {
3008 SpillAfter(range, split_pos);
3009 } else {
3010 next_intersection = Min(next_intersection, next_pos->pos());
3011 SpillBetween(range, split_pos, next_intersection);
3012 }
3013 InactiveToHandled(range);
3014 --i;
3015 }
3016 }
3017 }
3018}
3019
3020
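// If most of a phi's inputs are already spilled, try to merge their spill
// ranges and, when the phi itself is not needed in a register soon, spill the
// phi into the same slot. Returns true if the phi was spilled this way.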
3021bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
3022 if (!range->is_phi()) return false;
3023
3024 DCHECK(!range->HasSpillOperand());
3025 RegisterAllocationData::PhiMapValue* phi_map_value =
3026 data()->GetPhiMapValueFor(range);
3027 const PhiInstruction* phi = phi_map_value->phi();
3028 const InstructionBlock* block = phi_map_value->block();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003029 // Count the number of spilled operands.
3030 size_t spilled_count = 0;
3031 LiveRange* first_op = nullptr;
3032 for (size_t i = 0; i < phi->operands().size(); i++) {
3033 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003034 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
3035 if (!op_range->TopLevel()->HasSpillRange()) continue;
3036 const InstructionBlock* pred =
3037 code()->InstructionBlockAt(block->predecessors()[i]);
3038 LifetimePosition pred_end =
3039 LifetimePosition::InstructionFromInstructionIndex(
3040 pred->last_instruction_index());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003041 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
3042 op_range = op_range->next();
3043 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003044 if (op_range != nullptr && op_range->spilled()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003045 spilled_count++;
3046 if (first_op == nullptr) {
3047 first_op = op_range->TopLevel();
3048 }
3049 }
3050 }
3051
3052 // Only continue if more than half of the operands are spilled.
3053 if (spilled_count * 2 <= phi->operands().size()) {
3054 return false;
3055 }
3056
3057 // Try to merge the spilled operands and count the number of merged spilled
3058 // operands.
3059 DCHECK(first_op != nullptr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003060 SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003061 size_t num_merged = 1;
3062 for (size_t i = 1; i < phi->operands().size(); i++) {
3063 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003064 TopLevelLiveRange* op_range = data()->live_ranges()[op];
3065 if (!op_range->HasSpillRange()) continue;
3066 SpillRange* op_spill = op_range->GetSpillRange();
3067 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003068 num_merged++;
3069 }
3070 }
3071
 3072  // Only continue if enough operands could be merged into the
3073 // same spill slot.
3074 if (num_merged * 2 <= phi->operands().size() ||
3075 AreUseIntervalsIntersecting(first_op_spill->interval(),
3076 range->first_interval())) {
3077 return false;
3078 }
3079
 3080  // If the range does not need a register soon, spill it to the merged
3081 // spill range.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003082 LifetimePosition next_pos = range->Start();
3083 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
3084 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003085 if (pos == nullptr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003086 SpillRange* spill_range =
3087 range->TopLevel()->HasSpillRange()
3088 ? range->TopLevel()->GetSpillRange()
3089 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3090 bool merged = first_op_spill->TryMerge(spill_range);
Ben Murdochc5610432016-08-08 18:44:38 +01003091 if (!merged) return false;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003092 Spill(range);
3093 return true;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003094 } else if (pos->pos() > range->Start().NextStart()) {
3095 SpillRange* spill_range =
3096 range->TopLevel()->HasSpillRange()
3097 ? range->TopLevel()->GetSpillRange()
3098 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3099 bool merged = first_op_spill->TryMerge(spill_range);
Ben Murdochc5610432016-08-08 18:44:38 +01003100 if (!merged) return false;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003101 SpillBetween(range, range->Start(), pos->pos());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003102 DCHECK(UnhandledIsSorted());
3103 return true;
3104 }
3105 return false;
3106}
3107
3108
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003109void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
3110 LiveRange* second_part = SplitRangeAt(range, pos);
3111 Spill(second_part);
3112}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003113
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003114
3115void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
3116 LifetimePosition end) {
3117 SpillBetweenUntil(range, start, start, end);
3118}
3119
3120
3121void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
3122 LifetimePosition start,
3123 LifetimePosition until,
3124 LifetimePosition end) {
3125 CHECK(start < end);
3126 LiveRange* second_part = SplitRangeAt(range, start);
3127
3128 if (second_part->Start() < end) {
3129 // The split result intersects with [start, end[.
 3130    // Split it at a position inside ]start+1, end[, spill the middle part
 3131    // and add the rest to the unhandled set.
3132 LifetimePosition third_part_end = end.PrevStart().End();
3133 if (data()->IsBlockBoundary(end.Start())) {
3134 third_part_end = end.Start();
3135 }
3136 LiveRange* third_part = SplitBetween(
3137 second_part, Max(second_part->Start().End(), until), third_part_end);
3138
3139 DCHECK(third_part != second_part);
3140
3141 Spill(second_part);
3142 AddToUnhandledSorted(third_part);
3143 } else {
3144 // The split result does not intersect with [start, end[.
 3145    // Nothing to spill. Just add it to unhandled as a whole.
3146 AddToUnhandledSorted(second_part);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003147 }
3148}
3149
3150
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003151SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
3152 : data_(data) {}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003153
3154
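// Mark every block that receives a spill move for a frame-allocated spill
// range as needing a frame.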
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003155void SpillSlotLocator::LocateSpillSlots() {
3156 const InstructionSequence* code = data()->code();
3157 for (TopLevelLiveRange* range : data()->live_ranges()) {
3158 if (range == nullptr || range->IsEmpty()) continue;
3159 // We care only about ranges which spill in the frame.
Ben Murdochda12d292016-06-02 14:46:10 +01003160 if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
3161 continue;
3162 }
3163 TopLevelLiveRange::SpillMoveInsertionList* spills =
3164 range->GetSpillMoveInsertionLocations();
3165 DCHECK_NOT_NULL(spills);
3166 for (; spills != nullptr; spills = spills->next) {
3167 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003168 }
3169 }
3170}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003171
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003172
3173OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
3174
3175
3176void OperandAssigner::AssignSpillSlots() {
3177 ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
 3178  // Merge disjoint spill ranges.
3179 for (size_t i = 0; i < spill_ranges.size(); ++i) {
3180 SpillRange* range = spill_ranges[i];
3181 if (range == nullptr) continue;
3182 if (range->IsEmpty()) continue;
3183 for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
3184 SpillRange* other = spill_ranges[j];
3185 if (other != nullptr && !other->IsEmpty()) {
3186 range->TryMerge(other);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003187 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003188 }
3189 }
3190 // Allocate slots for the merged spill ranges.
3191 for (SpillRange* range : spill_ranges) {
3192 if (range == nullptr || range->IsEmpty()) continue;
3193 // Allocate a new operand referring to the spill slot.
3194 if (!range->HasSlot()) {
3195 int byte_width = range->ByteWidth();
3196 int index = data()->frame()->AllocateSpillSlot(byte_width);
3197 range->set_assigned_slot(index);
3198 }
3199 }
3200}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003201
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003202
3203void OperandAssigner::CommitAssignment() {
3204 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3205 if (top_range == nullptr || top_range->IsEmpty()) continue;
3206 InstructionOperand spill_operand;
3207 if (top_range->HasSpillOperand()) {
3208 spill_operand = *top_range->TopLevel()->GetSpillOperand();
3209 } else if (top_range->TopLevel()->HasSpillRange()) {
3210 spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
3211 }
3212 if (top_range->is_phi()) {
3213 data()->GetPhiMapValueFor(top_range)->CommitAssignment(
3214 top_range->GetAssignedOperand());
3215 }
3216 for (LiveRange* range = top_range; range != nullptr;
3217 range = range->next()) {
3218 InstructionOperand assigned = range->GetAssignedOperand();
3219 range->ConvertUsesToOperand(assigned, spill_operand);
3220 }
3221
3222 if (!spill_operand.IsInvalid()) {
3223 // If this top level range has a child spilled in a deferred block, we use
3224 // the range and control flow connection mechanism instead of spilling at
3225 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3226 // phases. Normally, when we spill at definition, we do not insert a
3227 // connecting move when a successor child range is spilled - because the
3228 // spilled range picks up its value from the slot which was assigned at
3229 // definition. For ranges that are determined to spill only in deferred
Ben Murdoch097c5b22016-05-18 11:27:45 +01003230 // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
3231 // where a spill operand is expected, and then finalize by inserting the
 3232      // spills in the deferred blocks' dominators.
3233 if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003234 // Spill at definition if the range isn't spilled only in deferred
3235 // blocks.
3236 top_range->CommitSpillMoves(
3237 data()->code(), spill_operand,
3238 top_range->has_slot_use() || top_range->spilled());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003239 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003240 }
3241 }
3242}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003243
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003244
3245ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3246 : data_(data) {}
3247
3248
3249bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3250 int safe_point = 0;
3251 for (ReferenceMap* map : *data()->code()->reference_maps()) {
3252 if (safe_point > map->instruction_position()) return false;
3253 safe_point = map->instruction_position();
3254 }
3255 return true;
3256}
3257
3258
3259void ReferenceMapPopulator::PopulateReferenceMaps() {
3260 DCHECK(SafePointsAreInOrder());
3261 // Map all delayed references.
3262 for (RegisterAllocationData::DelayedReference& delayed_reference :
3263 data()->delayed_references()) {
3264 delayed_reference.map->RecordReference(
3265 AllocatedOperand::cast(*delayed_reference.operand));
3266 }
3267 // Iterate over all safe point positions and record a pointer
3268 // for all spilled live ranges at this point.
3269 int last_range_start = 0;
3270 const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3271 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3272 for (TopLevelLiveRange* range : data()->live_ranges()) {
3273 if (range == nullptr) continue;
3274 // Skip non-reference values.
3275 if (!data()->IsReference(range)) continue;
3276 // Skip empty live ranges.
3277 if (range->IsEmpty()) continue;
3278 if (range->has_preassigned_slot()) continue;
3279
3280 // Find the extent of the range and its children.
3281 int start = range->Start().ToInstructionIndex();
3282 int end = 0;
3283 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3284 LifetimePosition this_end = cur->End();
3285 if (this_end.ToInstructionIndex() > end)
3286 end = this_end.ToInstructionIndex();
3287 DCHECK(cur->Start().ToInstructionIndex() >= start);
3288 }
3289
3290 // Most of the ranges are in order, but not all. Keep an eye on when they
3291 // step backwards and reset the first_it so we don't miss any safe points.
3292 if (start < last_range_start) first_it = reference_maps->begin();
3293 last_range_start = start;
3294
3295 // Step across all the safe points that are before the start of this range,
3296 // recording how far we step in order to save doing this for the next range.
3297 for (; first_it != reference_maps->end(); ++first_it) {
3298 ReferenceMap* map = *first_it;
3299 if (map->instruction_position() >= start) break;
3300 }
3301
3302 InstructionOperand spill_operand;
3303 if (((range->HasSpillOperand() &&
3304 !range->GetSpillOperand()->IsConstant()) ||
3305 range->HasSpillRange())) {
3306 if (range->HasSpillOperand()) {
3307 spill_operand = *range->GetSpillOperand();
3308 } else {
3309 spill_operand = range->GetSpillRangeOperand();
3310 }
3311 DCHECK(spill_operand.IsStackSlot());
3312 DCHECK_EQ(MachineRepresentation::kTagged,
3313 AllocatedOperand::cast(spill_operand).representation());
3314 }
3315
3316 LiveRange* cur = range;
3317 // Step through the safe points to see whether they are in the range.
3318 for (auto it = first_it; it != reference_maps->end(); ++it) {
3319 ReferenceMap* map = *it;
3320 int safe_point = map->instruction_position();
3321
3322 // The safe points are sorted so we can stop searching here.
3323 if (safe_point - 1 > end) break;
3324
3325 // Advance to the next active range that covers the current
3326 // safe point position.
3327 LifetimePosition safe_point_pos =
3328 LifetimePosition::InstructionFromInstructionIndex(safe_point);
3329
3330 // Search for the child range (cur) that covers safe_point_pos. If we
3331 // don't find it before the children pass safe_point_pos, keep cur at
3332 // the last child, because the next safe_point_pos may be covered by cur.
3333 // This may happen if cur has more than one interval, and the current
3334 // safe_point_pos is in between intervals.
3335 // For that reason, cur may be at most the last child.
3336 DCHECK_NOT_NULL(cur);
3337 DCHECK(safe_point_pos >= cur->Start() || range == cur);
3338 bool found = false;
3339 while (!found) {
3340 if (cur->Covers(safe_point_pos)) {
3341 found = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003342 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003343 LiveRange* next = cur->next();
3344 if (next == nullptr || next->Start() > safe_point_pos) {
3345 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003346 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003347 cur = next;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003348 }
3349 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003350
3351 if (!found) {
3352 continue;
3353 }
3354
3355 // Check if the live range is spilled and the safe point is after
3356 // the spill position.
3357 int spill_index = range->IsSpilledOnlyInDeferredBlocks()
3358 ? cur->Start().ToInstructionIndex()
3359 : range->spill_start_index();
3360
3361 if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
3362 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
3363 range->vreg(), spill_index, safe_point);
3364 map->RecordReference(AllocatedOperand::cast(spill_operand));
3365 }
3366
3367 if (!cur->spilled()) {
3368 TRACE(
3369 "Pointer in register for range %d:%d (start at %d) "
3370 "at safe point %d\n",
3371 range->vreg(), cur->relative_id(), cur->Start().value(),
3372 safe_point);
3373 InstructionOperand operand = cur->GetAssignedOperand();
3374 DCHECK(!operand.IsStackSlot());
3375 DCHECK_EQ(MachineRepresentation::kTagged,
3376 AllocatedOperand::cast(operand).representation());
3377 map->RecordReference(AllocatedOperand::cast(operand));
3378 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003379 }
3380 }
3381}
3382
3383
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003384LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
3385 : data_(data) {}
3386
3387
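// Control flow on an edge can be resolved eagerly (i.e. needs no explicit
// resolution move) when the block has a single predecessor that is also its
// immediate predecessor in RPO order.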
3388bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3389 const InstructionBlock* block) const {
3390 if (block->PredecessorCount() != 1) return false;
3391 return block->predecessors()[0].IsNext(block->rpo_number());
3392}
3393
3394
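// Walk all blocks that cannot be eagerly resolved and, for each live-in value,
// insert a gap move on the incoming edge whenever the predecessor's assigned
// operand differs from the one expected in this block.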
3395void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003396 // Lazily linearize live ranges in memory for fast lookup.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003397 LiveRangeFinder finder(data(), local_zone);
3398 ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3399 for (const InstructionBlock* block : code()->instruction_blocks()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003400 if (CanEagerlyResolveControlFlow(block)) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003401 BitVector* live = live_in_sets[block->rpo_number().ToInt()];
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003402 BitVector::Iterator iterator(live);
3403 while (!iterator.Done()) {
Ben Murdochc5610432016-08-08 18:44:38 +01003404 int vreg = iterator.Current();
3405 LiveRangeBoundArray* array = finder.ArrayFor(vreg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003406 for (const RpoNumber& pred : block->predecessors()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003407 FindResult result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003408 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3409 if (!array->FindConnectableSubranges(block, pred_block, &result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003410 continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003411 }
3412 InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3413 InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3414 if (pred_op.Equals(cur_op)) continue;
Ben Murdoch097c5b22016-05-18 11:27:45 +01003415 if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
3416 // We're doing a reload.
3417 // We don't need to, if:
3418 // 1) there's no register use in this block, and
3419 // 2) the range ends before the block does, and
3420 // 3) we don't have a successor, or the successor is spilled.
3421 LifetimePosition block_start =
3422 LifetimePosition::GapFromInstructionIndex(block->code_start());
3423 LifetimePosition block_end =
3424 LifetimePosition::GapFromInstructionIndex(block->code_end());
3425 const LiveRange* current = result.cur_cover_;
3426 const LiveRange* successor = current->next();
3427 if (current->End() < block_end &&
3428 (successor == nullptr || successor->spilled())) {
 3429          // Verify point 1: no register use. We can go to the end of the
3430 // range, since it's all within the block.
3431
3432 bool uses_reg = false;
3433 for (const UsePosition* use = current->NextUsePosition(block_start);
3434 use != nullptr; use = use->next()) {
3435 if (use->operand()->IsAnyRegister()) {
3436 uses_reg = true;
3437 break;
3438 }
3439 }
3440 if (!uses_reg) continue;
3441 }
3442 if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3443 pred_block->IsDeferred()) {
3444 // The spill location should be defined in pred_block, so add
3445 // pred_block to the list of blocks requiring a spill operand.
3446 current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
3447 pred_block->rpo_number().ToInt());
3448 }
3449 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003450 int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
3451 USE(move_loc);
3452 DCHECK_IMPLIES(
3453 result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3454 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
3455 code()->GetInstructionBlock(move_loc)->IsDeferred());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003456 }
3457 iterator.Advance();
3458 }
3459 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003460
3461 // At this stage, we collected blocks needing a spill operand from
3462 // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
3463 // deferred blocks.
3464 for (TopLevelLiveRange* top : data()->live_ranges()) {
3465 if (top == nullptr || top->IsEmpty() ||
3466 !top->IsSpilledOnlyInDeferredBlocks())
3467 continue;
3468 CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
3469 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003470}
3471
3472
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003473int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
3474 const InstructionOperand& cur_op,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003475 const InstructionBlock* pred,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003476 const InstructionOperand& pred_op) {
3477 DCHECK(!pred_op.Equals(cur_op));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003478 int gap_index;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003479 Instruction::GapPosition position;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003480 if (block->PredecessorCount() == 1) {
3481 gap_index = block->first_instruction_index();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003482 position = Instruction::START;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003483 } else {
3484 DCHECK(pred->SuccessorCount() == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003485 DCHECK(!code()
3486 ->InstructionAt(pred->last_instruction_index())
3487 ->HasReferenceMap());
3488 gap_index = pred->last_instruction_index();
3489 position = Instruction::END;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003490 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003491 data()->AddGapMove(gap_index, position, pred_op, cur_op);
3492 return gap_index;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003493}
3494
3495
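// Within a block, connect consecutive child ranges of the same top-level
// range: when two touching children are assigned different operands, insert a
// gap move between them.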
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003496void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
3497 DelayedInsertionMap delayed_insertion_map(local_zone);
3498 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3499 if (top_range == nullptr) continue;
3500 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
3501 LiveRange* first_range = top_range;
3502 for (LiveRange *second_range = first_range->next(); second_range != nullptr;
3503 first_range = second_range, second_range = second_range->next()) {
3504 LifetimePosition pos = second_range->Start();
3505 // Add gap move if the two live ranges touch and there is no block
3506 // boundary.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003507 if (second_range->spilled()) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003508 if (first_range->End() != pos) continue;
3509 if (data()->IsBlockBoundary(pos) &&
3510 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003511 continue;
3512 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003513 InstructionOperand prev_operand = first_range->GetAssignedOperand();
3514 InstructionOperand cur_operand = second_range->GetAssignedOperand();
3515 if (prev_operand.Equals(cur_operand)) continue;
3516 bool delay_insertion = false;
3517 Instruction::GapPosition gap_pos;
3518 int gap_index = pos.ToInstructionIndex();
Ben Murdoch097c5b22016-05-18 11:27:45 +01003519 if (connect_spilled && !prev_operand.IsAnyRegister() &&
3520 cur_operand.IsAnyRegister()) {
3521 const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
3522 DCHECK(block->IsDeferred());
 3523        // We are performing a reload in this block, so the spill operand must
 3524        // be defined here.
3525 top_range->GetListOfBlocksRequiringSpillOperands()->Add(
3526 block->rpo_number().ToInt());
3527 }
3528
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003529 if (pos.IsGapPosition()) {
3530 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003531 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003532 if (pos.IsStart()) {
3533 delay_insertion = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003534 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003535 gap_index++;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003536 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003537 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
3538 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003539      // Reloads or spills for ranges that are spilled only in deferred blocks
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003540      // must themselves happen in deferred blocks.
3541 DCHECK_IMPLIES(
3542 connect_spilled &&
3543 !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
3544 code()->GetInstructionBlock(gap_index)->IsDeferred());
3545
3546 ParallelMove* move =
3547 code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
3548 gap_pos, code_zone());
3549 if (!delay_insertion) {
3550 move->AddMove(prev_operand, cur_operand);
3551 } else {
3552 delayed_insertion_map.insert(
3553 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003554 }
3555 }
3556 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003557 if (delayed_insertion_map.empty()) return;
3558 // Insert all the moves which should occur after the stored move.
3559 ZoneVector<MoveOperands*> to_insert(local_zone);
3560 ZoneVector<MoveOperands*> to_eliminate(local_zone);
3561 to_insert.reserve(4);
3562 to_eliminate.reserve(4);
3563 ParallelMove* moves = delayed_insertion_map.begin()->first.first;
3564 for (auto it = delayed_insertion_map.begin();; ++it) {
3565 bool done = it == delayed_insertion_map.end();
3566 if (done || it->first.first != moves) {
3567 // Commit the MoveOperands for current ParallelMove.
3568 for (MoveOperands* move : to_eliminate) {
3569 move->Eliminate();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003570 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003571 for (MoveOperands* move : to_insert) {
3572 moves->push_back(move);
3573 }
3574 if (done) break;
3575 // Reset state.
3576 to_eliminate.clear();
3577 to_insert.clear();
3578 moves = it->first.first;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003579 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003580 // Gather all MoveOperands for a single ParallelMove.
3581 MoveOperands* move =
3582 new (code_zone()) MoveOperands(it->first.second, it->second);
3583 MoveOperands* eliminate = moves->PrepareInsertAfter(move);
3584 to_insert.push_back(move);
3585 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003586 }
3587}
3588
3589
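// For a range spilled only in deferred blocks, walk up from every block that
// needs the value on the stack to the deferred blocks that dominate those
// locations, and emit the spill move at the start of those blocks.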
Ben Murdoch097c5b22016-05-18 11:27:45 +01003590void LiveRangeConnector::CommitSpillsInDeferredBlocks(
3591 TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
3592 DCHECK(range->IsSpilledOnlyInDeferredBlocks());
3593 DCHECK(!range->spilled());
3594
3595 InstructionSequence* code = data()->code();
3596 InstructionOperand spill_operand = range->GetSpillRangeOperand();
3597
3598 TRACE("Live Range %d will be spilled only in deferred blocks.\n",
3599 range->vreg());
3600 // If we have ranges that aren't spilled but require the operand on the stack,
3601 // make sure we insert the spill.
3602 for (const LiveRange* child = range; child != nullptr;
3603 child = child->next()) {
3604 for (const UsePosition* pos = child->first_pos(); pos != nullptr;
3605 pos = pos->next()) {
3606 if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
3607 continue;
3608 range->AddBlockRequiringSpillOperand(
3609 code->GetInstructionBlock(pos->pos().ToInstructionIndex())
3610 ->rpo_number());
3611 }
3612 }
3613
3614 ZoneQueue<int> worklist(temp_zone);
3615
3616 for (BitVector::Iterator iterator(
3617 range->GetListOfBlocksRequiringSpillOperands());
3618 !iterator.Done(); iterator.Advance()) {
3619 worklist.push(iterator.Current());
3620 }
3621
Ben Murdochc5610432016-08-08 18:44:38 +01003622 ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003623 // Seek the deferred blocks that dominate locations requiring spill operands,
3624 // and spill there. We only need to spill at the start of such blocks.
3625 BitVector done_blocks(
3626 range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
3627 while (!worklist.empty()) {
3628 int block_id = worklist.front();
3629 worklist.pop();
3630 if (done_blocks.Contains(block_id)) continue;
3631 done_blocks.Add(block_id);
Ben Murdochda12d292016-06-02 14:46:10 +01003632 InstructionBlock* spill_block =
Ben Murdoch097c5b22016-05-18 11:27:45 +01003633 code->InstructionBlockAt(RpoNumber::FromInt(block_id));
3634
3635 for (const RpoNumber& pred : spill_block->predecessors()) {
3636 const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
3637
3638 if (pred_block->IsDeferred()) {
3639 worklist.push(pred_block->rpo_number().ToInt());
3640 } else {
3641 LifetimePosition pred_end =
3642 LifetimePosition::InstructionFromInstructionIndex(
3643 pred_block->last_instruction_index());
3644
3645 LiveRangeBound* bound = array->Find(pred_end);
3646
3647 InstructionOperand pred_op = bound->range_->GetAssignedOperand();
3648
Ben Murdochc5610432016-08-08 18:44:38 +01003649 RpoNumber spill_block_number = spill_block->rpo_number();
3650 if (done_moves.find(std::make_pair(
3651 spill_block_number, range->vreg())) == done_moves.end()) {
3652 data()->AddGapMove(spill_block->first_instruction_index(),
3653 Instruction::GapPosition::START, pred_op,
3654 spill_operand);
3655 done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
3656 spill_block->mark_needs_frame();
3657 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003658 }
3659 }
3660 }
3661}
3662
3663
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003664} // namespace compiler
3665} // namespace internal
3666} // namespace v8