// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)
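// TRACE is the allocator's tracing hook; for example,
// TRACE("Shorten live range %d to [%d\n", vreg(), start.value()) in
// TopLevelLiveRange::ShortenTo below prints only when FLAG_trace_alloc is set.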


namespace {

void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
  auto it = std::find(v->begin(), v->end(), range);
  DCHECK(it != v->end());
  v->erase(it);
}


int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
  return kind == DOUBLE_REGISTERS ? cfg->num_double_registers()
                                  : cfg->num_general_registers();
}


int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                RegisterKind kind) {
  return kind == DOUBLE_REGISTERS
             ? cfg->num_allocatable_aliased_double_registers()
             : cfg->num_allocatable_general_registers();
}


const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
                                       RegisterKind kind) {
  return kind == DOUBLE_REGISTERS ? cfg->allocatable_double_codes()
                                  : cfg->allocatable_general_codes();
}


const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}


const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}


Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}


bool IsOutputRegisterOf(Instruction* instr, Register reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsRegister() &&
        LocationOperand::cast(output)->GetRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsDoubleRegister() &&
        LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


// TODO(dcarney): fix frame to allow frame accesses to half size location.
int GetByteWidth(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kTagged:
      return kPointerSize;
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kFloat64:
      return 8;
    case MachineRepresentation::kSimd128:
      return 16;
    case MachineRepresentation::kNone:
      break;
  }
  UNREACHABLE();
  return 0;
}

}  // namespace

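// LiveRangeBound caches the fixed [start, end) bounds of one LiveRange child
// so lookups by position do not have to re-walk the range. The skip_ flag
// marks spilled children, which normally need no connecting moves (see the
// comment in LiveRangeBoundArray::Initialize below).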
class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;

 private:
  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
};


struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};


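// LiveRangeBoundArray holds one LiveRangeBound per child of a
// TopLevelLiveRange, built lazily in Initialize(). Find() binary-searches the
// array by position, which assumes the children are sorted by start position
// and do not overlap (cf. TopLevelLiveRange::VerifyChildrenInOrder).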
class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}

  bool ShouldInitialize() { return start_ == nullptr; }

  void Initialize(Zone* zone, TopLevelLiveRange* range) {
    length_ = range->GetChildCount();

    start_ = zone->NewArray<LiveRangeBound>(length_);
    LiveRangeBound* curr = start_;
    // Normally, spilled ranges do not need connecting moves, because the spill
    // location has been assigned at definition. For ranges spilled in deferred
    // blocks, that is not the case, so we need to connect the spilled children.
    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
      new (curr) LiveRangeBound(i, i->spilled());
    }
  }

  LiveRangeBound* Find(const LifetimePosition position) const {
    size_t left_index = 0;
    size_t right_index = length_;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      DCHECK(right_index > current_index);
      LiveRangeBound* bound = &start_[current_index];
      if (bound->start_ <= position) {
        if (position < bound->end_) return bound;
        DCHECK(left_index < current_index);
        left_index = current_index;
      } else {
        right_index = current_index;
      }
    }
  }

  LiveRangeBound* FindPred(const InstructionBlock* pred) {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    return Find(pred_end);
  }

  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
        succ->first_instruction_index());
    return Find(succ_start);
  }

  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    LiveRangeBound* bound = Find(pred_end);
    result->pred_cover_ = bound->range_;
    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());

    if (bound->CanCover(cur_start)) {
      // Both blocks are covered by the same range, so there is nothing to
      // connect.
      return false;
    }
    bound = Find(cur_start);
    if (bound->skip_) {
      return false;
    }
    result->cur_cover_ = bound->range_;
    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
    return (result->cur_cover_ != result->pred_cover_);
  }

 private:
  size_t length_;
  LiveRangeBound* start_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
};


class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
      : data_(data),
        bounds_length_(static_cast<int>(data_->live_ranges().size())),
        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
        zone_(zone) {
    for (int i = 0; i < bounds_length_; ++i) {
      new (&bounds_[i]) LiveRangeBoundArray();
    }
  }

  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
    DCHECK(range != nullptr && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(zone_, range);
    }
    return array;
  }

 private:
  const RegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
};


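// DelayedInsertionMap keys moves by the ParallelMove they should eventually
// be added to, together with their destination operand. It is presumably used
// by the connection/resolution code later in this file to batch insertions
// rather than mutating a ParallelMove while iterating over it.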
typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;


struct DelayedInsertionMapCompare {
  bool operator()(const DelayedInsertionMapKey& a,
                  const DelayedInsertionMapKey& b) const {
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  }
};


typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
                DelayedInsertionMapCompare> DelayedInsertionMap;


UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         void* hint, UsePositionHintType hint_type)
    : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
  DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kAny;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
    if (unalloc->HasRegisterPolicy()) {
      type = UsePositionType::kRequiresRegister;
    } else if (unalloc->HasSlotPolicy()) {
      type = UsePositionType::kRequiresSlot;
      register_beneficial = false;
    } else {
      register_beneficial = !unalloc->HasAnyPolicy();
    }
  }
  flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
           RegisterBeneficialField::encode(register_beneficial) |
           AssignedRegisterField::encode(kUnassignedRegister);
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  int hint_register;
  return HintRegister(&hint_register);
}


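// Decodes the allocation hint into a register code. Depending on
// HintTypeField, the hint_ pointer is interpreted as another UsePosition, an
// allocated operand, or a phi map value; absent or unresolved hints yield
// false.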
bool UsePosition::HintRegister(int* register_code) const {
  if (hint_ == nullptr) return false;
  switch (HintTypeField::decode(flags_)) {
    case UsePositionHintType::kNone:
    case UsePositionHintType::kUnresolved:
      return false;
    case UsePositionHintType::kUsePos: {
      UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
      int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kOperand: {
      InstructionOperand* operand =
          reinterpret_cast<InstructionOperand*>(hint_);
      int assigned_register =
          operand->IsRegister()
              ? LocationOperand::cast(operand)->GetRegister().code()
              : LocationOperand::cast(operand)->GetDoubleRegister().code();
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kPhi: {
      RegisterAllocationData::PhiMapValue* phi =
          reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
      int assigned_register = phi->assigned_register();
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
  }
  UNREACHABLE();
  return false;
}


UsePositionHintType UsePosition::HintTypeForOperand(
    const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::CONSTANT:
    case InstructionOperand::IMMEDIATE:
    case InstructionOperand::EXPLICIT:
      return UsePositionHintType::kNone;
    case InstructionOperand::UNALLOCATED:
      return UsePositionHintType::kUnresolved;
    case InstructionOperand::ALLOCATED:
      if (op.IsRegister() || op.IsDoubleRegister()) {
        return UsePositionHintType::kOperand;
      } else {
        DCHECK(op.IsStackSlot() || op.IsDoubleStackSlot());
        return UsePositionHintType::kNone;
      }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
  return UsePositionHintType::kNone;
}


void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}


void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
  DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
  DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
  flags_ = TypeField::encode(type) |
           RegisterBeneficialField::encode(register_beneficial) |
           HintTypeField::encode(HintTypeField::decode(flags_)) |
           AssignedRegisterField::encode(kUnassignedRegister);
}


UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  UseInterval* after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}


void LifetimePosition::Print() const {
  OFStream os(stdout);
  os << *this << std::endl;
}


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
  os << '@' << pos.ToInstructionIndex();
  if (pos.IsGapPosition()) {
    os << 'g';
  } else {
    os << 'i';
  }
  if (pos.IsStart()) {
    os << 's';
  } else {
    os << 'e';
  }
  return os;
}


const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();


LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      splitting_pointer_(nullptr),
      size_(kInvalidSize),
      weight_(kInvalidWeight),
      group_(nullptr) {
  DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          RepresentationField::encode(rep);
}


void LiveRange::VerifyPositions() const {
  // Walk the positions, verifying that each is in an interval.
  UseInterval* interval = first_interval_;
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    CHECK(Start() <= pos->pos());
    CHECK(pos->pos() <= End());
    CHECK_NOT_NULL(interval);
    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
      interval = interval->next();
      CHECK_NOT_NULL(interval);
    }
  }
}


void LiveRange::VerifyIntervals() const {
  DCHECK(first_interval()->start() == Start());
  LifetimePosition last_end = first_interval()->end();
  for (UseInterval* interval = first_interval()->next(); interval != nullptr;
       interval = interval->next()) {
    DCHECK(last_end <= interval->start());
    last_end = interval->end();
  }
  DCHECK(last_end == End());
}


void LiveRange::set_assigned_register(int reg) {
  DCHECK(!HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, reg);
}


void LiveRange::UnsetAssignedRegister() {
  DCHECK(HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


void LiveRange::Spill() {
  DCHECK(!spilled());
  DCHECK(!TopLevel()->HasNoSpillType());
  set_spilled(true);
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


RegisterKind LiveRange::kind() const {
  return IsFloatingPoint(representation()) ? DOUBLE_REGISTERS
                                           : GENERAL_REGISTERS;
}


UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    if (pos->HintRegister(register_index)) return pos;
  }
  return nullptr;
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == nullptr || use_pos->pos() > start) {
    use_pos = first_pos();
  }
  while (use_pos != nullptr && use_pos->pos() < start) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = first_pos();
  UsePosition* prev = nullptr;
  while (pos != nullptr && pos->pos() < start) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
  for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
       pos = pos->next()) {
    if (pos->type() != UsePositionType::kRequiresSlot) continue;
    return pos;
  }
  return nullptr;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == nullptr) return true;
  return use_pos->pos() > pos.NextStart().End();
}


bool LiveRange::IsTopLevel() const { return top_level_ == this; }


InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    return AllocatedOperand(LocationOperand::REGISTER, representation(),
                            assigned_register());
  }
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    InstructionOperand* op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  LifetimePosition start = current_interval_ == nullptr
                               ? LifetimePosition::Invalid()
                               : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}


LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
  DetachAt(position, child, zone);

  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;
  return child;
}


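// Detaches everything at or after 'position' into 'result': use intervals are
// split (or the chain is cut at an interval boundary) and use positions are
// re-linked so that 'result' owns the tail. Returns the last use position that
// stays with this range, which Splinter() records as splitting_pointer_.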
UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
                                 Zone* zone) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK(nullptr != after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed to
    // the split child because the split child owns the use interval covering it.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  // Invalidate size and weight of this range. The child range has them
  // invalid at construction.
  size_ = kInvalidSize;
  weight_ = kInvalidWeight;
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}


void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
  for (; child != nullptr; child = child->next()) {
    child->top_level_ = new_top_level;
  }
}


void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        DCHECK(op.IsRegister() || op.IsDoubleRegister());
      // Fall through.
      case UsePositionType::kAny:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}


void LiveRange::SetUseHints(int register_index) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        break;
      case UsePositionType::kRequiresRegister:
      case UsePositionType::kAny:
        pos->set_assigned_register(register_index);
        break;
    }
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start() <= position && position < End();
}


bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
  }
  return false;
}


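// Walks the two sorted interval lists in lockstep and returns the first
// position where this range and 'other' overlap, or an invalid position if
// they are disjoint.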
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


unsigned LiveRange::GetSize() {
  if (size_ == kInvalidSize) {
    size_ = 0;
    for (const UseInterval* interval = first_interval(); interval != nullptr;
         interval = interval->next()) {
      size_ += (interval->end().value() - interval->start().value());
    }
  }

  return static_cast<unsigned>(size_);
}


void LiveRange::Print(const RegisterConfiguration* config,
                      bool with_children) const {
  OFStream os(stdout);
  PrintableLiveRange wrapper;
  wrapper.register_configuration_ = config;
  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    wrapper.range_ = i;
    os << wrapper << std::endl;
    if (!with_children) break;
  }
}


void LiveRange::Print(bool with_children) const {
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN);
  Print(config, with_children);
}


struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};


TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}


#if DEBUG
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif


void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                            InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      gap_index, operand, spill_move_insertion_locations_);
}

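// For every recorded spill location, inserts a gap move from the defining
// operand to the spill slot 'op'. If the move might already exist as a
// constraint move from a fixed output register to the slot, it is searched for
// and not added twice; when the range has a preassigned slot, the existing
// move is eliminated and no new move is added.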
void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}


void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}


void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}


AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  SpillRange* spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
}


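// Carves the interval [start, end) out of this range and appends it to the
// associated splinter() range. If 'end' reaches past End() the whole tail is
// detached; otherwise the detached middle piece is spliced out and the
// remaining tail is reattached to this range.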
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    DetachAt(start, &splinter_temp, zone);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    UsePosition* last = DetachAt(start, &splinter_temp, zone);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    last_in_splinter = splinter_temp.DetachAt(end, &end_part, zone);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}


void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  splintered_from_ = splinter_parent;
  if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}


void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    merged->spill_range_ = nullptr;
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}


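// Merges a splinter ('other', previously split off from this range) back into
// this range's chain of children, keeping the children sorted by start
// position and splitting wherever a child of this range overlaps a child of
// the splinter.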
void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());
    // If first and second intersect, split first.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);

#if DEBUG
  Verify();
#endif
}


void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}


void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    VerifyChildStructure();
  }
}


void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK(first_interval_ != nullptr);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}


void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    last_interval_ = new_interval;
  }
}


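// Prepends or extends the first use interval. Liveness processing adds
// intervals in an order that guarantees each new interval either precedes or
// intersects the most recently added one (see the comment in the else branch
// below), so only first_interval_ ever needs adjusting here.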
void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions) guarantees
      // that each new use interval either precedes or intersects with
      // last added interval.
      DCHECK(start < first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}


void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}


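// Returns true if the two sorted interval lists overlap anywhere, advancing
// whichever list currently starts earlier.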
static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                        UseInterval* interval2) {
  while (interval1 != nullptr && interval2 != nullptr) {
    if (interval1->start() < interval2->start()) {
      if (interval1->end() > interval2->start()) {
        return true;
      }
      interval1 = interval1->next();
    } else {
      if (interval2->end() > interval1->start()) {
        return true;
      }
      interval2 = interval2->next();
    }
  }
  return false;
}


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range) {
  const LiveRange* range = printable_range.range_;
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
     << " ";
  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;
  UseInterval* interval = range->first_interval();
  UsePosition* use_pos = range->first_pos();
  PrintableInstructionOperand pio;
  pio.register_configuration_ = printable_range.register_configuration_;
  while (use_pos != nullptr) {
    if (use_pos->HasOperand()) {
      pio.op_ = *use_pos->operand();
      os << pio << use_pos->pos() << " ";
    }
    use_pos = use_pos->next();
  }
  os << std::endl;

  while (interval != nullptr) {
    os << '[' << interval->start() << ", " << interval->end() << ')'
       << std::endl;
    interval = interval->next();
  }
  os << "}";
  return os;
}


SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())),
      kind_(parent->kind()) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;
  UseInterval* node = nullptr;
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  end_position_ = node->end();
  parent->SetSpillRange(this);
}


int SpillRange::ByteWidth() const {
  return GetByteWidth(live_ranges_[0]->representation());
}


bool SpillRange::IsIntersectingWith(SpillRange* other) const {
  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
      this->End() <= other->use_interval_->start() ||
      other->End() <= this->use_interval_->start()) {
    return false;
  }
  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
}


bool SpillRange::TryMerge(SpillRange* other) {
  if (HasSlot() || other->HasSlot()) return false;
  // TODO(dcarney): byte widths should be compared here not kinds.
  if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() ||
      IsIntersectingWith(other)) {
    return false;
  }

  LifetimePosition max = LifetimePosition::MaxPosition();
  if (End() < other->End() && other->End() != max) {
    end_position_ = other->End();
  }
  other->end_position_ = max;

  MergeDisjointIntervals(other->use_interval_);
  other->use_interval_ = nullptr;

  for (TopLevelLiveRange* range : other->live_ranges()) {
    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);
  }

  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
                       other->live_ranges().end());
  other->live_ranges().clear();

  return true;
}


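// Standard merge of two sorted, mutually disjoint interval lists into
// use_interval_; the DCHECK below asserts the disjointness that TryMerge has
// already established via IsIntersectingWith.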
void SpillRange::MergeDisjointIntervals(UseInterval* other) {
  UseInterval* tail = nullptr;
  UseInterval* current = use_interval_;
  while (other != nullptr) {
    // Make sure the 'current' list starts first
    if (current == nullptr || current->start() > other->start()) {
      std::swap(current, other);
    }
    // Check disjointness
    DCHECK(other == nullptr || current->end() <= other->start());
    // Append the 'current' node to the result accumulator and move forward
    if (tail == nullptr) {
      use_interval_ = current;
    } else {
      tail->set_next(current);
    }
    tail = current;
    current = current->next();
  }
  // Other list is empty => we are done
}


void SpillRange::Print() const {
  OFStream os(stdout);
  os << "{" << std::endl;
  for (TopLevelLiveRange* range : live_ranges()) {
    os << range->vreg() << " ";
  }
  os << std::endl;

  for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
  }
  os << "}" << std::endl;
}


RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
                                                 const InstructionBlock* block,
                                                 Zone* zone)
    : phi_(phi),
      block_(block),
      incoming_operands_(zone),
      assigned_register_(kUnassignedRegister) {
  incoming_operands_.reserve(phi->operands().size());
}


void RegisterAllocationData::PhiMapValue::AddOperand(
    InstructionOperand* operand) {
  incoming_operands_.push_back(operand);
}


void RegisterAllocationData::PhiMapValue::CommitAssignment(
    const InstructionOperand& assigned) {
  for (InstructionOperand* operand : incoming_operands_) {
    InstructionOperand::ReplaceWith(operand, &assigned);
  }
}


RegisterAllocationData::RegisterAllocationData(
    const RegisterConfiguration* config, Zone* zone, Frame* frame,
    InstructionSequence* code, const char* debug_name)
    : allocation_zone_(zone),
      frame_(frame),
      code_(code),
      debug_name_(debug_name),
      config_(config),
      phi_map_(allocation_zone()),
      allocatable_codes_(this->config()->num_general_registers(), -1,
                         allocation_zone()),
      allocatable_double_codes_(this->config()->num_double_registers(), -1,
                                allocation_zone()),
      live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
                   allocation_zone()),
      fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                         allocation_zone()),
      fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                allocation_zone()),
      spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
      delayed_references_(allocation_zone()),
      assigned_registers_(nullptr),
      assigned_double_registers_(nullptr),
      virtual_register_count_(code->VirtualRegisterCount()),
      preassigned_slot_ranges_(zone) {
  DCHECK(this->config()->num_general_registers() <=
         RegisterConfiguration::kMaxGeneralRegisters);
  DCHECK(this->config()->num_double_registers() <=
         RegisterConfiguration::kMaxDoubleRegisters);
  assigned_registers_ = new (code_zone())
      BitVector(this->config()->num_general_registers(), code_zone());
  assigned_double_registers_ = new (code_zone())
      BitVector(this->config()->num_double_registers(), code_zone());
  this->frame()->SetAllocatedRegisters(assigned_registers_);
  this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
}


MoveOperands* RegisterAllocationData::AddGapMove(
    int index, Instruction::GapPosition position,
    const InstructionOperand& from, const InstructionOperand& to) {
  Instruction* instr = code()->InstructionAt(index);
  ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
  return moves->AddMove(from, to);
}


MachineRepresentation RegisterAllocationData::RepresentationFor(
    int virtual_register) {
  DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
  return code()->GetRepresentation(virtual_register);
}


TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
  if (index >= static_cast<int>(live_ranges().size())) {
    live_ranges().resize(index + 1, nullptr);
  }
  TopLevelLiveRange* result = live_ranges()[index];
  if (result == nullptr) {
    result = NewLiveRange(index, RepresentationFor(index));
    live_ranges()[index] = result;
  }
  return result;
}


TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    int index, MachineRepresentation rep) {
  return new (allocation_zone()) TopLevelLiveRange(index, rep);
}


int RegisterAllocationData::GetNextLiveRangeId() {
  int vreg = virtual_register_count_++;
  if (vreg >= static_cast<int>(live_ranges().size())) {
    live_ranges().resize(vreg + 1, nullptr);
  }
  return vreg;
}


TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    MachineRepresentation rep) {
  int vreg = GetNextLiveRangeId();
  TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
  return ret;
}


RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    const InstructionBlock* block, PhiInstruction* phi) {
  RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
      RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
  auto res =
      phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
  DCHECK(res.second);
  USE(res);
  return map_value;
}


RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    int virtual_register) {
  auto it = phi_map_.find(virtual_register);
  DCHECK(it != phi_map_.end());
  return it->second;
}


RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    TopLevelLiveRange* top_range) {
  return GetPhiMapValueFor(top_range->vreg());
}


1513bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1514 bool found = false;
1515 BitVector::Iterator iterator(live_in_sets()[0]);
1516 while (!iterator.Done()) {
1517 found = true;
1518 int operand_index = iterator.Current();
1519 PrintF("Register allocator error: live v%d reached first block.\n",
1520 operand_index);
1521 LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1522 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1523 if (debug_name() == nullptr) {
1524 PrintF("\n");
1525 } else {
1526 PrintF(" (function: %s)\n", debug_name());
1527 }
1528 iterator.Advance();
1529 }
1530 return found;
1531}
1532
1533
1534// If a range is defined in a deferred block, we expect the entire range to
1535// cover only positions in deferred blocks. Otherwise, a block on the hot
1536// path would be dominated by a deferred block, meaning it is unreachable
1537// without passing through the deferred block, which is contradictory.
1538// In particular, when such a range contributes a result back to the hot
1539// path, it does so as one of the inputs of a phi. In that case, the value
1540// is transferred via a move in the Gap::END position of the last instruction
1541// of a deferred block.
1542bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1543 for (const TopLevelLiveRange* range : live_ranges()) {
1544 if (range == nullptr || range->IsEmpty() ||
1545 !code()
1546 ->GetInstructionBlock(range->Start().ToInstructionIndex())
1547 ->IsDeferred()) {
1548 continue;
1549 }
1550 for (const UseInterval* i = range->first_interval(); i != nullptr;
1551 i = i->next()) {
1552 int first = i->FirstGapIndex();
1553 int last = i->LastGapIndex();
1554 for (int instr = first; instr <= last;) {
1555 const InstructionBlock* block = code()->GetInstructionBlock(instr);
1556 if (!block->IsDeferred()) return false;
1557 instr = block->last_instruction_index() + 1;
1558      }
1559 }
1560 }
1561  return true;
1562}
1563
1564
1565SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1566 TopLevelLiveRange* range) {
1567 DCHECK(!range->HasSpillOperand());
1568
1569 SpillRange* spill_range = range->GetAllocatedSpillRange();
1570 if (spill_range == nullptr) {
1571 DCHECK(!range->IsSplinter());
1572 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1573  }
1574  range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
1575
1576  int spill_range_index =
1577      range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
1578
1579  spill_ranges()[spill_range_index] = spill_range;
1580
1581  return spill_range;
1582}
1583
1584
1585SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1586 TopLevelLiveRange* range) {
1587 DCHECK(!range->HasSpillOperand());
1588 DCHECK(!range->IsSplinter());
1589 SpillRange* spill_range =
1590 new (allocation_zone()) SpillRange(range, allocation_zone());
1591 return spill_range;
1592}
1593
1594
1595void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
1596 if (kind == DOUBLE_REGISTERS) {
1597 assigned_double_registers_->Add(index);
1598 } else {
1599 DCHECK(kind == GENERAL_REGISTERS);
1600 assigned_registers_->Add(index);
1601 }
1602}
1603
1604
1605bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1606 return pos.IsFullStart() &&
1607 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1608 pos.ToInstructionIndex();
1609}
1610
1611
1612ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1613 : data_(data) {}
1614
1615
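// Turns an operand with a fixed policy into the concrete register or stack
// slot it names. If the value is tagged, the location is also recorded in the
// instruction's reference map (when one is present).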
1616InstructionOperand* ConstraintBuilder::AllocateFixed(
1617 UnallocatedOperand* operand, int pos, bool is_tagged) {
1618 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1619 DCHECK(operand->HasFixedPolicy());
1620 InstructionOperand allocated;
1621 MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1622 int virtual_register = operand->virtual_register();
1623 if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1624 rep = data()->RepresentationFor(virtual_register);
1625 }
1626 if (operand->HasFixedSlotPolicy()) {
1627 allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1628 operand->fixed_slot_index());
1629 } else if (operand->HasFixedRegisterPolicy()) {
1630 DCHECK(!IsFloatingPoint(rep));
1631 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1632 operand->fixed_register_index());
1633 } else if (operand->HasFixedDoubleRegisterPolicy()) {
1634 DCHECK(IsFloatingPoint(rep));
1635 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1636 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1637 operand->fixed_register_index());
1638 } else {
1639 UNREACHABLE();
1640 }
1641 InstructionOperand::ReplaceWith(operand, &allocated);
1642 if (is_tagged) {
1643 TRACE("Fixed reg is tagged at %d\n", pos);
1644 Instruction* instr = code()->InstructionAt(pos);
1645 if (instr->HasReferenceMap()) {
1646 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1647 }
1648 }
1649 return operand;
1650}
1651
1652
1653void ConstraintBuilder::MeetRegisterConstraints() {
1654 for (InstructionBlock* block : code()->instruction_blocks()) {
1655 MeetRegisterConstraints(block);
1656 }
1657}
1658
1659
1660void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1661 int start = block->first_instruction_index();
1662 int end = block->last_instruction_index();
1663 DCHECK_NE(-1, start);
1664 for (int i = start; i <= end; ++i) {
1665 MeetConstraintsBefore(i);
1666 if (i != end) MeetConstraintsAfter(i);
1667 }
1668  // Meet register constraints for the last instruction in the block.
1669 MeetRegisterConstraintsForLastInstructionInBlock(block);
1670}
1671
1672
1673void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1674 const InstructionBlock* block) {
1675 int end = block->last_instruction_index();
1676 Instruction* last_instruction = code()->InstructionAt(end);
1677 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1678 InstructionOperand* output_operand = last_instruction->OutputAt(i);
1679 DCHECK(!output_operand->IsConstant());
1680 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1681 int output_vreg = output->virtual_register();
1682 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1683 bool assigned = false;
1684 if (output->HasFixedPolicy()) {
1685 AllocateFixed(output, -1, false);
1686      // This value is produced on the stack; we never need to spill it.
1687 if (output->IsStackSlot()) {
1688 DCHECK(LocationOperand::cast(output)->index() <
1689 data()->frame()->GetSpillSlotCount());
1690 range->SetSpillOperand(LocationOperand::cast(output));
1691 range->SetSpillStartIndex(end);
1692 assigned = true;
1693 }
1694
1695 for (const RpoNumber& succ : block->successors()) {
1696 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1697 DCHECK(successor->PredecessorCount() == 1);
1698 int gap_index = successor->first_instruction_index();
1699 // Create an unconstrained operand for the same virtual register
1700 // and insert a gap move from the fixed output to the operand.
1701 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1702 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1703 }
1704 }
1705
1706 if (!assigned) {
1707 for (const RpoNumber& succ : block->successors()) {
1708 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1709 DCHECK(successor->PredecessorCount() == 1);
1710 int gap_index = successor->first_instruction_index();
1711 range->RecordSpillLocation(allocation_zone(), gap_index, output);
1712 range->SetSpillStartIndex(gap_index);
1713 }
1714 }
1715 }
1716}
1717
1718
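// Handles the fixed temps and the constant/fixed outputs of the instruction
// at instr_index: fixed operands are pinned to their required location, a gap
// move from the fixed output to an unconstrained copy is inserted in the
// following gap, and spill locations are recorded where needed.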
1719void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1720 Instruction* first = code()->InstructionAt(instr_index);
1721 // Handle fixed temporaries.
1722 for (size_t i = 0; i < first->TempCount(); i++) {
1723 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1724 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1725 }
1726 // Handle constant/fixed output operands.
1727 for (size_t i = 0; i < first->OutputCount(); i++) {
1728 InstructionOperand* output = first->OutputAt(i);
1729 if (output->IsConstant()) {
1730 int output_vreg = ConstantOperand::cast(output)->virtual_register();
1731 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1732 range->SetSpillStartIndex(instr_index + 1);
1733 range->SetSpillOperand(output);
1734 continue;
1735 }
1736 UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1737 TopLevelLiveRange* range =
1738 data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1739 bool assigned = false;
1740 if (first_output->HasFixedPolicy()) {
1741 int output_vreg = first_output->virtual_register();
1742 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1743 bool is_tagged = code()->IsReference(output_vreg);
1744 if (first_output->HasSecondaryStorage()) {
1745 range->MarkHasPreassignedSlot();
1746 data()->preassigned_slot_ranges().push_back(
1747 std::make_pair(range, first_output->GetSecondaryStorage()));
1748 }
1749 AllocateFixed(first_output, instr_index, is_tagged);
1750
1751      // This value is produced on the stack; we never need to spill it.
1752 if (first_output->IsStackSlot()) {
1753 DCHECK(LocationOperand::cast(first_output)->index() <
1754 data()->frame()->GetTotalFrameSlotCount());
1755 range->SetSpillOperand(LocationOperand::cast(first_output));
1756 range->SetSpillStartIndex(instr_index + 1);
1757 assigned = true;
1758 }
1759 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1760 output_copy);
1761 }
1762 // Make sure we add a gap move for spilling (if we have not done
1763 // so already).
1764 if (!assigned) {
1765 range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1766 first_output);
1767 range->SetSpillStartIndex(instr_index + 1);
1768 }
1769 }
1770}
1771
1772
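// Handles fixed inputs and "output same as input" constraints of the
// instruction at instr_index, rewriting the constrained operands and adding
// the corresponding gap moves in the gap before the instruction.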
1773void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1774 Instruction* second = code()->InstructionAt(instr_index);
1775 // Handle fixed input operands of second instruction.
1776 for (size_t i = 0; i < second->InputCount(); i++) {
1777 InstructionOperand* input = second->InputAt(i);
1778 if (input->IsImmediate() || input->IsExplicit()) {
1779 continue; // Ignore immediates and explicitly reserved registers.
1780 }
1781 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1782 if (cur_input->HasFixedPolicy()) {
1783 int input_vreg = cur_input->virtual_register();
1784 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1785 bool is_tagged = code()->IsReference(input_vreg);
1786 AllocateFixed(cur_input, instr_index, is_tagged);
1787 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1788 }
1789 }
1790 // Handle "output same as input" for second instruction.
1791 for (size_t i = 0; i < second->OutputCount(); i++) {
1792 InstructionOperand* output = second->OutputAt(i);
1793 if (!output->IsUnallocated()) continue;
1794 UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1795 if (!second_output->HasSameAsInputPolicy()) continue;
1796 DCHECK(i == 0); // Only valid for first output.
1797 UnallocatedOperand* cur_input =
1798 UnallocatedOperand::cast(second->InputAt(0));
1799 int output_vreg = second_output->virtual_register();
1800 int input_vreg = cur_input->virtual_register();
1801 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1802 cur_input->set_virtual_register(second_output->virtual_register());
1803 MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1804 input_copy, *cur_input);
1805 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1806 if (second->HasReferenceMap()) {
1807 RegisterAllocationData::DelayedReference delayed_reference = {
1808 second->reference_map(), &gap_move->source()};
1809 data()->delayed_references().push_back(delayed_reference);
1810 }
1811 } else if (!code()->IsReference(input_vreg) &&
1812 code()->IsReference(output_vreg)) {
1813      // The input is assumed to become tagged immediately, i.e. before the
1814      // pointer map is used. This means the pointer map at the instruction
1815      // will include the output operand (whose value at the beginning of
1816      // the instruction is equal to the input operand). If this is not
1817      // desired, then the pointer map at this instruction needs to be
1818      // adjusted manually.
1819 }
1820 }
1821}
1822
1823
1824void ConstraintBuilder::ResolvePhis() {
1825 // Process the blocks in reverse order.
1826 for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1827 ResolvePhis(block);
1828 }
1829}
1830
1831
1832void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1833 for (PhiInstruction* phi : block->phis()) {
1834 int phi_vreg = phi->virtual_register();
1835 RegisterAllocationData::PhiMapValue* map_value =
1836 data()->InitializePhiMap(block, phi);
1837 InstructionOperand& output = phi->output();
1838 // Map the destination operands, so the commitment phase can find them.
1839 for (size_t i = 0; i < phi->operands().size(); ++i) {
1840 InstructionBlock* cur_block =
1841 code()->InstructionBlockAt(block->predecessors()[i]);
1842 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1843 MoveOperands* move = data()->AddGapMove(
1844 cur_block->last_instruction_index(), Instruction::END, input, output);
1845 map_value->AddOperand(&move->destination());
1846 DCHECK(!code()
1847 ->InstructionAt(cur_block->last_instruction_index())
1848 ->HasReferenceMap());
1849 }
1850 TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1851 int gap_index = block->first_instruction_index();
1852 live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1853 live_range->SetSpillStartIndex(gap_index);
1854 // We use the phi-ness of some nodes in some later heuristics.
1855 live_range->set_is_phi(true);
1856 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1857 }
1858}
1859
1860
1861LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1862 Zone* local_zone)
1863 : data_(data), phi_hints_(local_zone) {}
1864
1865
1866BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1867 RegisterAllocationData* data) {
1868 size_t block_index = block->rpo_number().ToSize();
1869 BitVector* live_out = data->live_out_sets()[block_index];
1870 if (live_out == nullptr) {
1871    // Compute live out for the given block, excluding backward successor
1872    // edges.
1873 Zone* zone = data->allocation_zone();
1874 const InstructionSequence* code = data->code();
1875
1876 live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1877
1878 // Process all successor blocks.
1879 for (const RpoNumber& succ : block->successors()) {
1880 // Add values live on entry to the successor.
1881 if (succ <= block->rpo_number()) continue;
1882 BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1883 if (live_in != nullptr) live_out->Union(*live_in);
1884
1885 // All phi input operands corresponding to this successor edge are live
1886 // out from this block.
1887 const InstructionBlock* successor = code->InstructionBlockAt(succ);
1888 size_t index = successor->PredecessorIndexOf(block->rpo_number());
1889 DCHECK(index < successor->PredecessorCount());
1890 for (PhiInstruction* phi : successor->phis()) {
1891 live_out->Add(phi->operands()[index]);
1892 }
1893 }
1894 data->live_out_sets()[block_index] = live_out;
1895 }
1896 return live_out;
1897}
1898
1899
1900void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1901 BitVector* live_out) {
1902 // Add an interval that includes the entire block to the live range for
1903 // each live_out value.
1904 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1905 block->first_instruction_index());
1906 LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1907 block->last_instruction_index())
1908 .NextStart();
1909 BitVector::Iterator iterator(live_out);
1910 while (!iterator.Done()) {
1911 int operand_index = iterator.Current();
1912 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1913 range->AddUseInterval(start, end, allocation_zone());
1914 iterator.Advance();
1915 }
1916}
1917
1918
1919int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
1920 return -index - 1 - config()->num_general_registers();
1921}
1922
1923
1924TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1925 DCHECK(index < config()->num_general_registers());
1926 TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
1927 if (result == nullptr) {
1928 result = data()->NewLiveRange(FixedLiveRangeID(index),
1929 InstructionSequence::DefaultRepresentation());
1930 DCHECK(result->IsFixed());
1931 result->set_assigned_register(index);
1932 data()->MarkAllocated(GENERAL_REGISTERS, index);
1933 data()->fixed_live_ranges()[index] = result;
1934 }
1935 return result;
1936}
1937
1938
1939TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
1940 DCHECK(index < config()->num_double_registers());
1941 TopLevelLiveRange* result = data()->fixed_double_live_ranges()[index];
1942 if (result == nullptr) {
1943 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index),
1944 MachineRepresentation::kFloat64);
1945 DCHECK(result->IsFixed());
1946 result->set_assigned_register(index);
1947 data()->MarkAllocated(DOUBLE_REGISTERS, index);
1948 data()->fixed_double_live_ranges()[index] = result;
1949 }
1950 return result;
1951}
1952
1953
1954TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
1955 if (operand->IsUnallocated()) {
1956 return data()->GetOrCreateLiveRangeFor(
1957 UnallocatedOperand::cast(operand)->virtual_register());
1958 } else if (operand->IsConstant()) {
1959 return data()->GetOrCreateLiveRangeFor(
1960 ConstantOperand::cast(operand)->virtual_register());
1961 } else if (operand->IsRegister()) {
1962 return FixedLiveRangeFor(
1963 LocationOperand::cast(operand)->GetRegister().code());
1964 } else if (operand->IsDoubleRegister()) {
1965 return FixedDoubleLiveRangeFor(
1966 LocationOperand::cast(operand)->GetDoubleRegister().code());
1967 } else {
1968 return nullptr;
1969 }
1970}
1971
1972
1973UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
1974 InstructionOperand* operand,
1975 void* hint,
1976 UsePositionHintType hint_type) {
1977 return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
1978}
1979
1980
1981UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
1982 InstructionOperand* operand, void* hint,
1983 UsePositionHintType hint_type) {
1984 TopLevelLiveRange* range = LiveRangeFor(operand);
1985 if (range == nullptr) return nullptr;
1986
1987 if (range->IsEmpty() || range->Start() > position) {
1988 // Can happen if there is a definition without use.
1989 range->AddUseInterval(position, position.NextStart(), allocation_zone());
1990 range->AddUsePosition(NewUsePosition(position.NextStart()));
1991 } else {
1992 range->ShortenTo(position);
1993 }
1994 if (!operand->IsUnallocated()) return nullptr;
1995 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
1996 UsePosition* use_pos =
1997 NewUsePosition(position, unalloc_operand, hint, hint_type);
1998 range->AddUsePosition(use_pos);
1999 return use_pos;
2000}
2001
2002
2003UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
2004 LifetimePosition position,
2005 InstructionOperand* operand, void* hint,
2006 UsePositionHintType hint_type) {
2007 TopLevelLiveRange* range = LiveRangeFor(operand);
2008 if (range == nullptr) return nullptr;
2009 UsePosition* use_pos = nullptr;
2010 if (operand->IsUnallocated()) {
2011 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2012 use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2013 range->AddUsePosition(use_pos);
2014 }
2015 range->AddUseInterval(block_start, position, allocation_zone());
2016 return use_pos;
2017}
2018
2019
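// Walks the block's instructions in reverse order, maintaining the live set:
// outputs end (kill) ranges, inputs and gap-move sources extend them, and
// instructions that clobber registers add blocking intervals to the fixed
// ranges.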
2020void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2021 BitVector* live) {
2022 int block_start = block->first_instruction_index();
2023 LifetimePosition block_start_position =
2024 LifetimePosition::GapFromInstructionIndex(block_start);
2025
2026 for (int index = block->last_instruction_index(); index >= block_start;
2027 index--) {
2028 LifetimePosition curr_position =
2029 LifetimePosition::InstructionFromInstructionIndex(index);
2030 Instruction* instr = code()->InstructionAt(index);
2031 DCHECK(instr != nullptr);
2032 DCHECK(curr_position.IsInstructionPosition());
2033 // Process output, inputs, and temps of this instruction.
2034 for (size_t i = 0; i < instr->OutputCount(); i++) {
2035 InstructionOperand* output = instr->OutputAt(i);
2036 if (output->IsUnallocated()) {
2037 // Unsupported.
2038 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2039 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2040 live->Remove(out_vreg);
2041 } else if (output->IsConstant()) {
2042 int out_vreg = ConstantOperand::cast(output)->virtual_register();
2043 live->Remove(out_vreg);
2044 }
2045 if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2046 output->IsRegister() &&
2047 AllocatedOperand::cast(output)->GetRegister().is(
2048 v8::internal::kReturnRegister0)) {
2049 // The register defined here is blocked from gap start - it is the
2050 // exception value.
2051 // TODO(mtrofin): should we explore an explicit opcode for
2052 // the first instruction in the handler?
2053 Define(LifetimePosition::GapFromInstructionIndex(index), output);
2054 } else {
2055 Define(curr_position, output);
2056 }
2057 }
2058
2059 if (instr->ClobbersRegisters()) {
2060 for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2061 int code = config()->GetAllocatableGeneralCode(i);
2062 if (!IsOutputRegisterOf(instr, Register::from_code(code))) {
2063 TopLevelLiveRange* range = FixedLiveRangeFor(code);
2064 range->AddUseInterval(curr_position, curr_position.End(),
2065 allocation_zone());
2066 }
2067 }
2068 }
2069
2070 if (instr->ClobbersDoubleRegisters()) {
2071 for (int i = 0; i < config()->num_allocatable_aliased_double_registers();
2072 ++i) {
2073 int code = config()->GetAllocatableDoubleCode(i);
2074 if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) {
2075 TopLevelLiveRange* range = FixedDoubleLiveRangeFor(code);
2076 range->AddUseInterval(curr_position, curr_position.End(),
2077 allocation_zone());
2078 }
2079 }
2080 }
2081
2082 for (size_t i = 0; i < instr->InputCount(); i++) {
2083 InstructionOperand* input = instr->InputAt(i);
2084 if (input->IsImmediate() || input->IsExplicit()) {
2085 continue; // Ignore immediates and explicitly reserved registers.
2086 }
2087 LifetimePosition use_pos;
2088 if (input->IsUnallocated() &&
2089 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2090 use_pos = curr_position;
2091 } else {
2092 use_pos = curr_position.End();
2093 }
2094
2095 if (input->IsUnallocated()) {
2096 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2097 int vreg = unalloc->virtual_register();
2098 live->Add(vreg);
2099 if (unalloc->HasSlotPolicy()) {
2100 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2101 }
2102 }
2103 Use(block_start_position, use_pos, input);
2104 }
2105
2106 for (size_t i = 0; i < instr->TempCount(); i++) {
2107 InstructionOperand* temp = instr->TempAt(i);
2108 // Unsupported.
2109 DCHECK_IMPLIES(temp->IsUnallocated(),
2110 !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2111 if (instr->ClobbersTemps()) {
2112 if (temp->IsRegister()) continue;
2113 if (temp->IsUnallocated()) {
2114 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2115 if (temp_unalloc->HasFixedPolicy()) {
2116 continue;
2117 }
2118 }
2119 }
2120 Use(block_start_position, curr_position.End(), temp);
2121 Define(curr_position, temp);
2122 }
2123
2124 // Process the moves of the instruction's gaps, making their sources live.
2125 const Instruction::GapPosition kPositions[] = {Instruction::END,
2126 Instruction::START};
2127 curr_position = curr_position.PrevStart();
2128 DCHECK(curr_position.IsGapPosition());
2129 for (const Instruction::GapPosition& position : kPositions) {
2130 ParallelMove* move = instr->GetParallelMove(position);
2131 if (move == nullptr) continue;
2132 if (position == Instruction::END) {
2133 curr_position = curr_position.End();
2134 } else {
2135 curr_position = curr_position.Start();
2136 }
2137 for (MoveOperands* cur : *move) {
2138 InstructionOperand& from = cur->source();
2139 InstructionOperand& to = cur->destination();
2140 void* hint = &to;
2141 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2142 UsePosition* to_use = nullptr;
2143 int phi_vreg = -1;
2144 if (to.IsUnallocated()) {
2145 int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2146 TopLevelLiveRange* to_range =
2147 data()->GetOrCreateLiveRangeFor(to_vreg);
2148 if (to_range->is_phi()) {
2149 phi_vreg = to_vreg;
2150 if (to_range->is_non_loop_phi()) {
2151 hint = to_range->current_hint_position();
2152 hint_type = hint == nullptr ? UsePositionHintType::kNone
2153 : UsePositionHintType::kUsePos;
2154 } else {
2155 hint_type = UsePositionHintType::kPhi;
2156 hint = data()->GetPhiMapValueFor(to_vreg);
2157 }
2158 } else {
2159 if (live->Contains(to_vreg)) {
2160 to_use = Define(curr_position, &to, &from,
2161 UsePosition::HintTypeForOperand(from));
2162 live->Remove(to_vreg);
2163 } else {
2164 cur->Eliminate();
2165 continue;
2166 }
2167 }
2168 } else {
2169 Define(curr_position, &to);
2170 }
2171 UsePosition* from_use =
2172 Use(block_start_position, curr_position, &from, hint, hint_type);
2173 // Mark range live.
2174 if (from.IsUnallocated()) {
2175 live->Add(UnallocatedOperand::cast(from).virtual_register());
2176 }
2177 // Resolve use position hints just created.
2178 if (to_use != nullptr && from_use != nullptr) {
2179 to_use->ResolveHint(from_use);
2180 from_use->ResolveHint(to_use);
2181 }
2182 DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2183 DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2184 // Potentially resolve phi hint.
2185 if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2186 }
2187 }
2188 }
2189}
2190
2191
2192void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2193 BitVector* live) {
2194 for (PhiInstruction* phi : block->phis()) {
2195 // The live range interval already ends at the first instruction of the
2196 // block.
2197 int phi_vreg = phi->virtual_register();
2198 live->Remove(phi_vreg);
2199 InstructionOperand* hint = nullptr;
2200 Instruction* instr = GetLastInstruction(
2201 code(), code()->InstructionBlockAt(block->predecessors()[0]));
2202 for (MoveOperands* move : *instr->GetParallelMove(Instruction::END)) {
2203 InstructionOperand& to = move->destination();
2204 if (to.IsUnallocated() &&
2205 UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2206 hint = &move->source();
2207 break;
2208 }
2209 }
2210 DCHECK(hint != nullptr);
2211 LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2212 block->first_instruction_index());
2213 UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2214 UsePosition::HintTypeForOperand(*hint));
2215 MapPhiHint(hint, use_pos);
2216 }
2217}
2218
2219
2220void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2221 BitVector* live) {
2222 DCHECK(block->IsLoopHeader());
2223 // Add a live range stretching from the first loop instruction to the last
2224 // for each value live on entry to the header.
2225 BitVector::Iterator iterator(live);
2226 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2227 block->first_instruction_index());
2228 LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2229 code()->LastLoopInstructionIndex(block))
2230 .NextFullStart();
2231 while (!iterator.Done()) {
2232 int operand_index = iterator.Current();
2233 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2234 range->EnsureInterval(start, end, allocation_zone());
2235 iterator.Advance();
2236 }
2237 // Insert all values into the live in sets of all blocks in the loop.
2238 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2239 ++i) {
2240 live_in_sets()[i]->Union(*live);
2241 }
2242}
2243
2244
2245void LiveRangeBuilder::BuildLiveRanges() {
2246 // Process the blocks in reverse order.
2247 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2248 --block_id) {
2249 InstructionBlock* block =
2250 code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2251 BitVector* live = ComputeLiveOut(block, data());
2252 // Initially consider all live_out values live for the entire block. We
2253 // will shorten these intervals if necessary.
2254 AddInitialIntervals(block, live);
2255 // Process the instructions in reverse order, generating and killing
2256 // live values.
2257 ProcessInstructions(block, live);
2258 // All phi output operands are killed by this block.
2259 ProcessPhis(block, live);
2260    // Now live is live_in for this block, not including values live out on
2261    // backward successor edges.
2262 if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2263 live_in_sets()[block_id] = live;
2264 }
2265 // Postprocess the ranges.
2266 for (TopLevelLiveRange* range : data()->live_ranges()) {
2267 if (range == nullptr) continue;
2268 // Give slots to all ranges with a non fixed slot use.
2269 if (range->has_slot_use() && range->HasNoSpillType()) {
2270 data()->AssignSpillRangeToLiveRange(range);
2271 }
2272 // TODO(bmeurer): This is a horrible hack to make sure that for constant
2273 // live ranges, every use requires the constant to be in a register.
2274 // Without this hack, all uses with "any" policy would get the constant
2275 // operand assigned.
2276 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2277 for (UsePosition* pos = range->first_pos(); pos != nullptr;
2278 pos = pos->next()) {
2279 if (pos->type() == UsePositionType::kRequiresSlot) continue;
2280 UsePositionType new_type = UsePositionType::kAny;
2281 // Can't mark phis as needing a register.
2282 if (!pos->pos().IsGapPosition()) {
2283 new_type = UsePositionType::kRequiresRegister;
2284 }
2285 pos->set_type(new_type, true);
2286 }
2287 }
2288 }
2289 for (auto preassigned : data()->preassigned_slot_ranges()) {
2290 TopLevelLiveRange* range = preassigned.first;
2291 int slot_id = preassigned.second;
2292 SpillRange* spill = range->HasSpillRange()
2293 ? range->GetSpillRange()
2294 : data()->AssignSpillRangeToLiveRange(range);
2295 spill->set_assigned_slot(slot_id);
2296 }
2297#ifdef DEBUG
2298 Verify();
2299#endif
2300}
2301
2302
2303void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2304 UsePosition* use_pos) {
2305 DCHECK(!use_pos->IsResolved());
2306 auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2307 DCHECK(res.second);
2308 USE(res);
2309}
2310
2311
2312void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2313 UsePosition* use_pos) {
2314 auto it = phi_hints_.find(operand);
2315 if (it == phi_hints_.end()) return;
2316 DCHECK(!it->second->IsResolved());
2317 it->second->ResolveHint(use_pos);
2318}
2319
2320
2321void LiveRangeBuilder::Verify() const {
2322 for (auto& hint : phi_hints_) {
2323 CHECK(hint.second->IsResolved());
2324 }
2325 for (TopLevelLiveRange* current : data()->live_ranges()) {
2326 if (current != nullptr && !current->IsEmpty()) current->Verify();
2327 }
2328}
2329
2330
2331RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2332 RegisterKind kind)
2333 : data_(data),
2334 mode_(kind),
2335 num_registers_(GetRegisterCount(data->config(), kind)),
2336 num_allocatable_registers_(
2337 GetAllocatableRegisterCount(data->config(), kind)),
2338 allocatable_register_codes_(
2339 GetAllocatableRegisterCodes(data->config(), kind)) {}
2340
2341
2342LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2343 const LiveRange* range, int instruction_index) {
2344 LifetimePosition ret = LifetimePosition::Invalid();
2345
2346 ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2347 if (range->Start() >= ret || ret >= range->End()) {
2348 return LifetimePosition::Invalid();
2349 }
2350 return ret;
2351}
2352
2353
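// Ranges that already carry a spill representation (e.g. they are defined by
// a spill operand) do not need a register right away: spill them outright if
// no use benefits from a register, or split off and spill the part up to the
// first such use.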
2354void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand(
2355 bool operands_only) {
2356 size_t initial_range_count = data()->live_ranges().size();
2357 for (size_t i = 0; i < initial_range_count; ++i) {
2358 TopLevelLiveRange* range = data()->live_ranges()[i];
2359 if (!CanProcessRange(range)) continue;
2360 if (range->HasNoSpillType() || (operands_only && range->HasSpillRange())) {
2361 continue;
2362 }
2363
2364 LifetimePosition start = range->Start();
2365 TRACE("Live range %d:%d is defined by a spill operand.\n",
2366 range->TopLevel()->vreg(), range->relative_id());
2367 LifetimePosition next_pos = start;
2368 if (next_pos.IsGapPosition()) {
2369 next_pos = next_pos.NextStart();
2370 }
2371 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2372 // If the range already has a spill operand and it doesn't need a
2373 // register immediately, split it and spill the first part of the range.
2374 if (pos == nullptr) {
2375 Spill(range);
2376 } else if (pos->pos() > range->Start().NextStart()) {
2377      // Do not spill the live range eagerly if the use position that can
2378      // benefit from the register is too close to the start of the live range.
2379 LifetimePosition split_pos = GetSplitPositionForInstruction(
2380 range, pos->pos().ToInstructionIndex());
2381 // There is no place to split, so we can't split and spill.
2382 if (!split_pos.IsValid()) continue;
2383
2384 split_pos =
2385 FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2386
2387 SplitRangeAt(range, split_pos);
2388 Spill(range);
2389 }
2390 }
2391}
2392
2393
2394LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2395 LifetimePosition pos) {
2396 DCHECK(!range->TopLevel()->IsFixed());
2397 TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2398 range->relative_id(), pos.value());
2399
2400 if (pos <= range->Start()) return range;
2401
2402  // We can't properly connect live ranges if splitting occurred at the end
2403  // of a block.
2404 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2405 (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2406 pos.ToInstructionIndex()));
2407
2408 LiveRange* result = range->SplitAt(pos, allocation_zone());
2409 return result;
2410}
2411
2412
2413LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2414 LifetimePosition start,
2415 LifetimePosition end) {
2416 DCHECK(!range->TopLevel()->IsFixed());
2417 TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2418 range->TopLevel()->vreg(), range->relative_id(), start.value(),
2419 end.value());
2420
2421 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2422 DCHECK(split_pos >= start);
2423 return SplitRangeAt(range, split_pos);
2424}
2425
2426
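// Picks a split position inside [start, end], preferring the entry of the
// outermost loop contained in that interval so the split-off part stays out
// of hot loop bodies; otherwise splits as late as possible.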
2427LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2428 LifetimePosition end) {
2429 int start_instr = start.ToInstructionIndex();
2430 int end_instr = end.ToInstructionIndex();
2431 DCHECK(start_instr <= end_instr);
2432
2433  // We have no choice.
2434 if (start_instr == end_instr) return end;
2435
2436 const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2437 const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2438
2439 if (end_block == start_block) {
2440 // The interval is split in the same basic block. Split at the latest
2441 // possible position.
2442 return end;
2443 }
2444
2445 const InstructionBlock* block = end_block;
2446  // Find the header of the outermost loop.
2447  do {
2448 const InstructionBlock* loop = GetContainingLoop(code(), block);
2449 if (loop == nullptr ||
2450 loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2451 // No more loops or loop starts before the lifetime start.
2452 break;
2453 }
2454 block = loop;
2455 } while (true);
2456
2457 // We did not find any suitable outer loop. Split at the latest possible
2458 // position unless end_block is a loop header itself.
2459 if (block == end_block && !end_block->IsLoopHeader()) return end;
2460
2461 return LifetimePosition::GapFromInstructionIndex(
2462 block->first_instruction_index());
2463}
2464
2465
2466LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2467 LiveRange* range, LifetimePosition pos) {
2468 const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2469 const InstructionBlock* loop_header =
2470 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2471
2472 if (loop_header == nullptr) return pos;
2473
2474 const UsePosition* prev_use =
2475 range->PreviousUsePositionRegisterIsBeneficial(pos);
2476
2477 while (loop_header != nullptr) {
2478    // We are going to spill the live range inside the loop.
2479    // If possible, try to move the spilling position backwards to the loop
2480    // header. This will reduce the number of memory moves on the back edge.
2481 LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2482 loop_header->first_instruction_index());
2483
2484 if (range->Covers(loop_start)) {
2485 if (prev_use == nullptr || prev_use->pos() < loop_start) {
2486 // No register beneficial use inside the loop before the pos.
2487 pos = loop_start;
2488 }
2489 }
2490
2491 // Try hoisting out to an outer loop.
2492 loop_header = GetContainingLoop(code(), loop_header);
2493 }
2494
2495 return pos;
2496}
2497
2498
2499void RegisterAllocator::Spill(LiveRange* range) {
2500 DCHECK(!range->spilled());
2501 TopLevelLiveRange* first = range->TopLevel();
2502 TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2503
2504 if (first->HasNoSpillType()) {
2505 data()->AssignSpillRangeToLiveRange(first);
2506 }
2507 range->Spill();
2508}
2509
2510
2511const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters()
2512 const {
2513 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges()
2514 : data()->fixed_live_ranges();
2515}
2516
2517
2518const char* RegisterAllocator::RegisterName(int register_code) const {
2519 if (mode() == GENERAL_REGISTERS) {
2520 return data()->config()->GetGeneralRegisterName(register_code);
2521 } else {
2522 return data()->config()->GetDoubleRegisterName(register_code);
2523 }
2524}
2525
2526
2527LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2528 RegisterKind kind, Zone* local_zone)
2529 : RegisterAllocator(data, kind),
2530 unhandled_live_ranges_(local_zone),
2531 active_live_ranges_(local_zone),
2532 inactive_live_ranges_(local_zone) {
2533 unhandled_live_ranges().reserve(
2534 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
2535 active_live_ranges().reserve(8);
2536 inactive_live_ranges().reserve(8);
2537 // TryAllocateFreeReg and AllocateBlockedReg assume this
2538 // when allocating local arrays.
2539 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
2540 this->data()->config()->num_general_registers());
2541}
2542
2543
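// Main linear-scan loop: ranges are processed in order of increasing start
// position; at each start the active and inactive sets are updated, then the
// current range either receives a free register, evicts a conflicting range,
// or is spilled.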
2544void LinearScanAllocator::AllocateRegisters() {
2545 DCHECK(unhandled_live_ranges().empty());
2546 DCHECK(active_live_ranges().empty());
2547 DCHECK(inactive_live_ranges().empty());
2548
2549 SplitAndSpillRangesDefinedByMemoryOperand(code()->VirtualRegisterCount() <=
2550 num_allocatable_registers());
2551
2552 for (TopLevelLiveRange* range : data()->live_ranges()) {
2553 if (!CanProcessRange(range)) continue;
2554 for (LiveRange* to_add = range; to_add != nullptr;
2555 to_add = to_add->next()) {
2556 if (!to_add->spilled()) {
2557 AddToUnhandledUnsorted(to_add);
2558 }
2559 }
2560 }
2561 SortUnhandled();
2562 DCHECK(UnhandledIsSorted());
2563
2564 auto& fixed_ranges = GetFixedRegisters();
2565 for (TopLevelLiveRange* current : fixed_ranges) {
2566 if (current != nullptr) {
2567 DCHECK_EQ(mode(), current->kind());
2568 AddToInactive(current);
2569 }
2570 }
2571
2572 while (!unhandled_live_ranges().empty()) {
2573 DCHECK(UnhandledIsSorted());
2574 LiveRange* current = unhandled_live_ranges().back();
2575 unhandled_live_ranges().pop_back();
2576 DCHECK(UnhandledIsSorted());
2577 LifetimePosition position = current->Start();
2578#ifdef DEBUG
2579 allocation_finger_ = position;
2580#endif
2581 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2582 current->relative_id(), position.value());
2583
2584 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2585 continue;
2586
2587 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2588 LiveRange* cur_active = active_live_ranges()[i];
2589 if (cur_active->End() <= position) {
2590 ActiveToHandled(cur_active);
2591 --i; // The live range was removed from the list of active live ranges.
2592 } else if (!cur_active->Covers(position)) {
2593 ActiveToInactive(cur_active);
2594 --i; // The live range was removed from the list of active live ranges.
2595 }
2596 }
2597
2598 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2599 LiveRange* cur_inactive = inactive_live_ranges()[i];
2600 if (cur_inactive->End() <= position) {
2601 InactiveToHandled(cur_inactive);
2602 --i; // Live range was removed from the list of inactive live ranges.
2603 } else if (cur_inactive->Covers(position)) {
2604 InactiveToActive(cur_inactive);
2605 --i; // Live range was removed from the list of inactive live ranges.
2606 }
2607 }
2608
2609 DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2610
2611 bool result = TryAllocateFreeReg(current);
2612 if (!result) AllocateBlockedReg(current);
2613 if (current->HasRegisterAssigned()) {
2614 AddToActive(current);
2615 }
2616 }
2617}
2618
2619
2620void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2621 int reg) {
2622 data()->MarkAllocated(range->kind(), reg);
2623 range->set_assigned_register(reg);
2624 range->SetUseHints(reg);
2625 if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
2626 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
2627 }
2628}
2629
2630
2631void LinearScanAllocator::AddToActive(LiveRange* range) {
2632 TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
2633 range->relative_id());
2634 active_live_ranges().push_back(range);
2635}
2636
2637
2638void LinearScanAllocator::AddToInactive(LiveRange* range) {
2639 TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
2640 range->relative_id());
2641 inactive_live_ranges().push_back(range);
2642}
2643
2644
2645void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2646 if (range == nullptr || range->IsEmpty()) return;
2647 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2648 DCHECK(allocation_finger_ <= range->Start());
2649 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2650 --i) {
2651 LiveRange* cur_range = unhandled_live_ranges().at(i);
2652 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2653 TRACE("Add live range %d:%d to unhandled at %d\n",
2654 range->TopLevel()->vreg(), range->relative_id(), i + 1);
2655 auto it = unhandled_live_ranges().begin() + (i + 1);
2656 unhandled_live_ranges().insert(it, range);
2657 DCHECK(UnhandledIsSorted());
2658 return;
2659 }
2660 TRACE("Add live range %d:%d to unhandled at start\n",
2661 range->TopLevel()->vreg(), range->relative_id());
2662 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2663 DCHECK(UnhandledIsSorted());
2664}
2665
2666
2667void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2668 if (range == nullptr || range->IsEmpty()) return;
2669 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2670 TRACE("Add live range %d:%d to unhandled unsorted at end\n",
2671 range->TopLevel()->vreg(), range->relative_id());
2672 unhandled_live_ranges().push_back(range);
2673}
2674
2675
2676static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2677 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2678 if (a->ShouldBeAllocatedBefore(b)) return false;
2679 if (b->ShouldBeAllocatedBefore(a)) return true;
2680 return a->TopLevel()->vreg() < b->TopLevel()->vreg();
2681}
2682
2683
2684// Sort the unhandled live ranges so that the ranges to be processed first are
2685// at the end of the array list. This is convenient for the register allocation
2686// algorithm because it is efficient to remove elements from the end.
2687void LinearScanAllocator::SortUnhandled() {
2688 TRACE("Sort unhandled\n");
2689 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2690 &UnhandledSortHelper);
2691}
2692
2693
2694bool LinearScanAllocator::UnhandledIsSorted() {
2695 size_t len = unhandled_live_ranges().size();
2696 for (size_t i = 1; i < len; i++) {
2697 LiveRange* a = unhandled_live_ranges().at(i - 1);
2698 LiveRange* b = unhandled_live_ranges().at(i);
2699 if (a->Start() < b->Start()) return false;
2700 }
2701 return true;
2702}
2703
2704
2705void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2706 RemoveElement(&active_live_ranges(), range);
2707 TRACE("Moving live range %d:%d from active to handled\n",
2708 range->TopLevel()->vreg(), range->relative_id());
2709}
2710
2711
2712void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2713 RemoveElement(&active_live_ranges(), range);
2714 inactive_live_ranges().push_back(range);
2715 TRACE("Moving live range %d:%d from active to inactive\n",
2716 range->TopLevel()->vreg(), range->relative_id());
2717}
2718
2719
2720void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2721 RemoveElement(&inactive_live_ranges(), range);
2722 TRACE("Moving live range %d:%d from inactive to handled\n",
2723 range->TopLevel()->vreg(), range->relative_id());
2724}
2725
2726
2727void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2728 RemoveElement(&inactive_live_ranges(), range);
2729 active_live_ranges().push_back(range);
2730 TRACE("Moving live range %d:%d from inactive to active\n",
2731 range->TopLevel()->vreg(), range->relative_id());
2732}
2733
2734
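// Tries to give current a register that is free at its start, preferring the
// hinted register. If the best candidate only stays free for part of the
// range, current is split there and just the first part gets the register.
// Returns false if every register is blocked at current's start.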
2735bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2736 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2737
2738 for (int i = 0; i < num_registers(); i++) {
2739 free_until_pos[i] = LifetimePosition::MaxPosition();
2740 }
2741
2742 for (LiveRange* cur_active : active_live_ranges()) {
2743 free_until_pos[cur_active->assigned_register()] =
2744 LifetimePosition::GapFromInstructionIndex(0);
2745 TRACE("Register %s is free until pos %d (1)\n",
2746 RegisterName(cur_active->assigned_register()),
2747 LifetimePosition::GapFromInstructionIndex(0).value());
2748 }
2749
2750 for (LiveRange* cur_inactive : inactive_live_ranges()) {
2751 DCHECK(cur_inactive->End() > current->Start());
2752 LifetimePosition next_intersection =
2753 cur_inactive->FirstIntersection(current);
2754 if (!next_intersection.IsValid()) continue;
2755 int cur_reg = cur_inactive->assigned_register();
2756 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
2757 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
2758 Min(free_until_pos[cur_reg], next_intersection).value());
2759 }
2760
2761 int hint_register;
2762 if (current->FirstHintPosition(&hint_register) != nullptr) {
2763 TRACE(
2764 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
2765 RegisterName(hint_register), free_until_pos[hint_register].value(),
2766 current->TopLevel()->vreg(), current->relative_id(),
2767 current->End().value());
2768
2769 // The desired register is free until the end of the current live range.
2770 if (free_until_pos[hint_register] >= current->End()) {
2771 TRACE("Assigning preferred reg %s to live range %d:%d\n",
2772 RegisterName(hint_register), current->TopLevel()->vreg(),
2773 current->relative_id());
2774 SetLiveRangeAssignedRegister(current, hint_register);
2775 return true;
2776 }
2777 }
2778
2779 // Find the register which stays free for the longest time.
2780 int reg = allocatable_register_code(0);
2781 for (int i = 1; i < num_allocatable_registers(); ++i) {
2782 int code = allocatable_register_code(i);
2783 if (free_until_pos[code] > free_until_pos[reg]) {
2784 reg = code;
2785 }
2786 }
2787
2788 LifetimePosition pos = free_until_pos[reg];
2789
2790 if (pos <= current->Start()) {
2791 // All registers are blocked.
2792 return false;
2793 }
2794
2795 if (pos < current->End()) {
2796 // Register reg is available at the range start but becomes blocked before
2797 // the range end. Split current at position where it becomes blocked.
2798 LiveRange* tail = SplitRangeAt(current, pos);
2799 AddToUnhandledSorted(tail);
2800 }
2801
2802 // Register reg is available at the range start and is free until
2803 // the range end.
2804 DCHECK(pos >= current->End());
2805 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
2806 current->TopLevel()->vreg(), current->relative_id());
2807 SetLiveRangeAssignedRegister(current, reg);
2808
2809 return true;
2810}
2811
2812
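// Called when no register is free for all of current. Picks the register
// whose next use by other ranges is farthest away: if even that use comes
// before current's first use that needs a register, current is spilled up to
// that use; otherwise the register is taken and the conflicting active and
// inactive ranges are split and spilled.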
2813void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2814 UsePosition* register_use = current->NextRegisterPosition(current->Start());
2815 if (register_use == nullptr) {
2816 // There is no use in the current live range that requires a register.
2817 // We can just spill it.
2818 Spill(current);
2819 return;
2820 }
2821
2822 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2823 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
2824
2825 for (int i = 0; i < num_registers(); i++) {
2826 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
2827 }
2828
2829 for (LiveRange* range : active_live_ranges()) {
2830 int cur_reg = range->assigned_register();
2831 if (range->TopLevel()->IsFixed() ||
2832 !range->CanBeSpilled(current->Start())) {
2833 block_pos[cur_reg] = use_pos[cur_reg] =
2834 LifetimePosition::GapFromInstructionIndex(0);
2835 } else {
2836 UsePosition* next_use =
2837 range->NextUsePositionRegisterIsBeneficial(current->Start());
2838 if (next_use == nullptr) {
2839 use_pos[cur_reg] = range->End();
2840 } else {
2841 use_pos[cur_reg] = next_use->pos();
2842 }
2843 }
2844 }
2845
2846 for (LiveRange* range : inactive_live_ranges()) {
2847 DCHECK(range->End() > current->Start());
2848 LifetimePosition next_intersection = range->FirstIntersection(current);
2849 if (!next_intersection.IsValid()) continue;
2850 int cur_reg = range->assigned_register();
2851 if (range->TopLevel()->IsFixed()) {
2852 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
2853 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
2854 } else {
2855 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
2856 }
2857 }
2858
2859 int reg = allocatable_register_code(0);
2860 for (int i = 1; i < num_allocatable_registers(); ++i) {
2861 int code = allocatable_register_code(i);
2862 if (use_pos[code] > use_pos[reg]) {
2863 reg = code;
2864 }
2865 }
2866
2867 LifetimePosition pos = use_pos[reg];
2868
2869 if (pos < register_use->pos()) {
2870 // All registers are blocked before the first use that requires a register.
2871 // Spill starting part of live range up to that use.
2872 SpillBetween(current, current->Start(), register_use->pos());
2873 return;
2874 }
2875
2876 if (block_pos[reg] < current->End()) {
2877 // Register becomes blocked before the current range end. Split before that
2878 // position.
2879 LiveRange* tail =
2880 SplitBetween(current, current->Start(), block_pos[reg].Start());
2881 AddToUnhandledSorted(tail);
2882 }
2883
2884 // Register reg is not blocked for the whole range.
2885 DCHECK(block_pos[reg] >= current->End());
2886 TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
2887 current->TopLevel()->vreg(), current->relative_id());
2888 SetLiveRangeAssignedRegister(current, reg);
2889
2890 // This register was not free. Thus we need to find and spill
2891 // parts of active and inactive live regions that use the same register
2892 // at the same lifetime positions as current.
2893 SplitAndSpillIntersecting(current);
2894}
2895
2896
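// current has just been assigned a register that other ranges were using.
// Split and spill the parts of those active and inactive ranges that overlap
// current in the same register.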
2897void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2898 DCHECK(current->HasRegisterAssigned());
2899 int reg = current->assigned_register();
2900 LifetimePosition split_pos = current->Start();
2901 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2902 LiveRange* range = active_live_ranges()[i];
2903 if (range->assigned_register() == reg) {
2904 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2905 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
2906 if (next_pos == nullptr) {
2907 SpillAfter(range, spill_pos);
2908 } else {
2909        // When spilling between spill_pos and next_pos, ensure that the range
2910        // remains spilled at least until the start of the current live range.
2911        // This guarantees that we will not introduce new unhandled ranges that
2912        // start before the current range, as this would violate the allocation
2913        // invariant and lead to an inconsistent state of active and inactive
2914        // live ranges: ranges are allocated in order of their start positions,
2915        // and retired from active/inactive when the start of the current
2916        // live range is larger than their end.
2917 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2918 }
2919 ActiveToHandled(range);
2920 --i;
2921 }
2922 }
2923
2924 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2925 LiveRange* range = inactive_live_ranges()[i];
2926 DCHECK(range->End() > current->Start());
2927 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
2928 LifetimePosition next_intersection = range->FirstIntersection(current);
2929 if (next_intersection.IsValid()) {
2930 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2931 if (next_pos == nullptr) {
2932 SpillAfter(range, split_pos);
2933 } else {
2934 next_intersection = Min(next_intersection, next_pos->pos());
2935 SpillBetween(range, split_pos, next_intersection);
2936 }
2937 InactiveToHandled(range);
2938 --i;
2939 }
2940 }
2941 }
2942}
2943
2944
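// If most of a phi's inputs are already spilled, try to merge their spill
// slots and reuse the merged slot for the phi itself, spilling the phi's
// range (or its leading part) instead of competing for a register.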
2945bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
2946 if (!range->is_phi()) return false;
2947
2948 DCHECK(!range->HasSpillOperand());
2949 RegisterAllocationData::PhiMapValue* phi_map_value =
2950 data()->GetPhiMapValueFor(range);
2951 const PhiInstruction* phi = phi_map_value->phi();
2952 const InstructionBlock* block = phi_map_value->block();
2953  // Count the number of spilled operands.
2954 size_t spilled_count = 0;
2955 LiveRange* first_op = nullptr;
2956 for (size_t i = 0; i < phi->operands().size(); i++) {
2957 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002958 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
2959 if (!op_range->TopLevel()->HasSpillRange()) continue;
2960 const InstructionBlock* pred =
2961 code()->InstructionBlockAt(block->predecessors()[i]);
2962 LifetimePosition pred_end =
2963 LifetimePosition::InstructionFromInstructionIndex(
2964 pred->last_instruction_index());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002965 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
2966 op_range = op_range->next();
2967 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002968 if (op_range != nullptr && op_range->spilled()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002969 spilled_count++;
2970 if (first_op == nullptr) {
2971 first_op = op_range->TopLevel();
2972 }
2973 }
2974 }
2975
2976 // Only continue if more than half of the operands are spilled.
2977 if (spilled_count * 2 <= phi->operands().size()) {
2978 return false;
2979 }
2980
2981 // Try to merge the spilled operands and count the number of merged spilled
2982 // operands.
2983 DCHECK(first_op != nullptr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002984 SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002985 size_t num_merged = 1;
2986 for (size_t i = 1; i < phi->operands().size(); i++) {
2987 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002988 TopLevelLiveRange* op_range = data()->live_ranges()[op];
2989 if (!op_range->HasSpillRange()) continue;
2990 SpillRange* op_spill = op_range->GetSpillRange();
2991 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002992 num_merged++;
2993 }
2994 }
2995
2996  // Only continue if enough operands could be merged into the
2997  // same spill slot.
2998 if (num_merged * 2 <= phi->operands().size() ||
2999 AreUseIntervalsIntersecting(first_op_spill->interval(),
3000 range->first_interval())) {
3001 return false;
3002 }
3003
3004  // If the range does not need a register soon, spill it to the merged
3005  // spill range.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003006 LifetimePosition next_pos = range->Start();
3007 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
3008 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003009 if (pos == nullptr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003010 SpillRange* spill_range =
3011 range->TopLevel()->HasSpillRange()
3012 ? range->TopLevel()->GetSpillRange()
3013 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3014 bool merged = first_op_spill->TryMerge(spill_range);
3015 CHECK(merged);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003016 Spill(range);
3017 return true;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003018 } else if (pos->pos() > range->Start().NextStart()) {
3019 SpillRange* spill_range =
3020 range->TopLevel()->HasSpillRange()
3021 ? range->TopLevel()->GetSpillRange()
3022 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3023 bool merged = first_op_spill->TryMerge(spill_range);
3024 CHECK(merged);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003025 SpillBetween(range, range->Start(), pos->pos());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003026 DCHECK(UnhandledIsSorted());
3027 return true;
3028 }
3029 return false;
3030}
3031
3032
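// Splits |range| at |pos| and spills the part that follows.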
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003033void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
3034 LiveRange* second_part = SplitRangeAt(range, pos);
3035 Spill(second_part);
3036}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003037
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003038
3039void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
3040 LifetimePosition end) {
3041 SpillBetweenUntil(range, start, start, end);
3042}
3043
3044
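// Splits |range| at |start|. If the resulting tail still reaches into
// [start, end[, it is split a second time, no earlier than |until|, so that
// the middle piece can be spilled while the remainder is re-queued as
// unhandled; otherwise the whole tail is simply re-queued as unhandled.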
3045void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
3046 LifetimePosition start,
3047 LifetimePosition until,
3048 LifetimePosition end) {
3049 CHECK(start < end);
3050 LiveRange* second_part = SplitRangeAt(range, start);
3051
3052 if (second_part->Start() < end) {
3053    // The split result intersects with [start, end[.
3054    // Split it at a position between ]start+1, end[, spill the middle part,
3055    // and put the rest back into the unhandled list.
3056 LifetimePosition third_part_end = end.PrevStart().End();
3057 if (data()->IsBlockBoundary(end.Start())) {
3058 third_part_end = end.Start();
3059 }
3060 LiveRange* third_part = SplitBetween(
3061 second_part, Max(second_part->Start().End(), until), third_part_end);
3062
3063 DCHECK(third_part != second_part);
3064
3065 Spill(second_part);
3066 AddToUnhandledSorted(third_part);
3067 } else {
3068    // The split result does not intersect with [start, end[.
3069    // Nothing to spill. Just put it into unhandled as a whole.
3070 AddToUnhandledSorted(second_part);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003071 }
3072}
3073
3074
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003075SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
3076 : data_(data) {}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003077
3078
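// Marks every instruction block that will write to a spill slot as needing a
// frame: for ranges spilled only in deferred blocks these are the blocks in
// which a spilled child starts, otherwise the blocks that receive the spill
// moves inserted at the definition.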
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003079void SpillSlotLocator::LocateSpillSlots() {
3080 const InstructionSequence* code = data()->code();
3081 for (TopLevelLiveRange* range : data()->live_ranges()) {
3082 if (range == nullptr || range->IsEmpty()) continue;
3083 // We care only about ranges which spill in the frame.
3084 if (!range->HasSpillRange()) continue;
3085 if (range->IsSpilledOnlyInDeferredBlocks()) {
3086 for (LiveRange* child = range; child != nullptr; child = child->next()) {
3087 if (child->spilled()) {
3088 code->GetInstructionBlock(child->Start().ToInstructionIndex())
3089 ->mark_needs_frame();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003090 }
3091 }
3092 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003093 TopLevelLiveRange::SpillMoveInsertionList* spills =
Ben Murdoch097c5b22016-05-18 11:27:45 +01003094 range->GetSpillMoveInsertionLocations();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003095 DCHECK_NOT_NULL(spills);
3096 for (; spills != nullptr; spills = spills->next) {
3097 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003098 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003099 }
3100 }
3101}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003102
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003103
3104OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
3105
3106
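// Tries to merge spill ranges that do not overlap and then allocates a stack
// slot for every remaining non-empty spill range that does not have one yet.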
3107void OperandAssigner::AssignSpillSlots() {
3108 ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
3109  // Merge disjoint spill ranges.
3110 for (size_t i = 0; i < spill_ranges.size(); ++i) {
3111 SpillRange* range = spill_ranges[i];
3112 if (range == nullptr) continue;
3113 if (range->IsEmpty()) continue;
3114 for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
3115 SpillRange* other = spill_ranges[j];
3116 if (other != nullptr && !other->IsEmpty()) {
3117 range->TryMerge(other);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003118 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003119 }
3120 }
3121 // Allocate slots for the merged spill ranges.
3122 for (SpillRange* range : spill_ranges) {
3123 if (range == nullptr || range->IsEmpty()) continue;
3124 // Allocate a new operand referring to the spill slot.
3125 if (!range->HasSlot()) {
3126 int byte_width = range->ByteWidth();
3127 int index = data()->frame()->AllocateSpillSlot(byte_width);
3128 range->set_assigned_slot(index);
3129 }
3130 }
3131}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003132
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003133
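// Writes the final allocation back into the instruction stream: every use of
// every child range is converted to its assigned operand, phi assignments are
// committed, and spill moves are emitted at the definition unless the range is
// spilled only in deferred blocks.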
3134void OperandAssigner::CommitAssignment() {
3135 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3136 if (top_range == nullptr || top_range->IsEmpty()) continue;
3137 InstructionOperand spill_operand;
3138 if (top_range->HasSpillOperand()) {
3139 spill_operand = *top_range->TopLevel()->GetSpillOperand();
3140 } else if (top_range->TopLevel()->HasSpillRange()) {
3141 spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
3142 }
3143 if (top_range->is_phi()) {
3144 data()->GetPhiMapValueFor(top_range)->CommitAssignment(
3145 top_range->GetAssignedOperand());
3146 }
3147 for (LiveRange* range = top_range; range != nullptr;
3148 range = range->next()) {
3149 InstructionOperand assigned = range->GetAssignedOperand();
3150 range->ConvertUsesToOperand(assigned, spill_operand);
3151 }
3152
3153 if (!spill_operand.IsInvalid()) {
3154 // If this top level range has a child spilled in a deferred block, we use
3155 // the range and control flow connection mechanism instead of spilling at
3156 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3157 // phases. Normally, when we spill at definition, we do not insert a
3158 // connecting move when a successor child range is spilled - because the
3159 // spilled range picks up its value from the slot which was assigned at
3160 // definition. For ranges that are determined to spill only in deferred
Ben Murdoch097c5b22016-05-18 11:27:45 +01003161 // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
3162 // where a spill operand is expected, and then finalize by inserting the
3163      // spills in the deferred blocks' dominators.
3164 if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003165 // Spill at definition if the range isn't spilled only in deferred
3166 // blocks.
3167 top_range->CommitSpillMoves(
3168 data()->code(), spill_operand,
3169 top_range->has_slot_use() || top_range->spilled());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003170 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003171 }
3172 }
3173}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003174
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003175
3176ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3177 : data_(data) {}
3178
3179
3180bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3181 int safe_point = 0;
3182 for (ReferenceMap* map : *data()->code()->reference_maps()) {
3183 if (safe_point > map->instruction_position()) return false;
3184 safe_point = map->instruction_position();
3185 }
3186 return true;
3187}
3188
3189
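// For every live range that holds a tagged value, records in each covering
// safe point's reference map the spill slot (once the value has been spilled)
// and/or the register that contains the pointer at that safe point.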
3190void ReferenceMapPopulator::PopulateReferenceMaps() {
3191 DCHECK(SafePointsAreInOrder());
3192 // Map all delayed references.
3193 for (RegisterAllocationData::DelayedReference& delayed_reference :
3194 data()->delayed_references()) {
3195 delayed_reference.map->RecordReference(
3196 AllocatedOperand::cast(*delayed_reference.operand));
3197 }
3198  // Iterate over all safe point positions and record a pointer
3199  // for all spilled live ranges at each such position.
3200 int last_range_start = 0;
3201 const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3202 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3203 for (TopLevelLiveRange* range : data()->live_ranges()) {
3204 if (range == nullptr) continue;
3205 // Skip non-reference values.
3206 if (!data()->IsReference(range)) continue;
3207 // Skip empty live ranges.
3208 if (range->IsEmpty()) continue;
3209 if (range->has_preassigned_slot()) continue;
3210
3211 // Find the extent of the range and its children.
3212 int start = range->Start().ToInstructionIndex();
3213 int end = 0;
3214 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3215 LifetimePosition this_end = cur->End();
3216 if (this_end.ToInstructionIndex() > end)
3217 end = this_end.ToInstructionIndex();
3218 DCHECK(cur->Start().ToInstructionIndex() >= start);
3219 }
3220
3221    // Most of the ranges are in order, but not all. Watch for when they
3222    // step backwards and reset first_it so we don't miss any safe points.
3223 if (start < last_range_start) first_it = reference_maps->begin();
3224 last_range_start = start;
3225
3226 // Step across all the safe points that are before the start of this range,
3227 // recording how far we step in order to save doing this for the next range.
3228 for (; first_it != reference_maps->end(); ++first_it) {
3229 ReferenceMap* map = *first_it;
3230 if (map->instruction_position() >= start) break;
3231 }
3232
3233 InstructionOperand spill_operand;
3234 if (((range->HasSpillOperand() &&
3235 !range->GetSpillOperand()->IsConstant()) ||
3236 range->HasSpillRange())) {
3237 if (range->HasSpillOperand()) {
3238 spill_operand = *range->GetSpillOperand();
3239 } else {
3240 spill_operand = range->GetSpillRangeOperand();
3241 }
3242 DCHECK(spill_operand.IsStackSlot());
3243 DCHECK_EQ(MachineRepresentation::kTagged,
3244 AllocatedOperand::cast(spill_operand).representation());
3245 }
3246
3247 LiveRange* cur = range;
3248 // Step through the safe points to see whether they are in the range.
3249 for (auto it = first_it; it != reference_maps->end(); ++it) {
3250 ReferenceMap* map = *it;
3251 int safe_point = map->instruction_position();
3252
3253 // The safe points are sorted so we can stop searching here.
3254 if (safe_point - 1 > end) break;
3255
3256 // Advance to the next active range that covers the current
3257 // safe point position.
3258 LifetimePosition safe_point_pos =
3259 LifetimePosition::InstructionFromInstructionIndex(safe_point);
3260
3261 // Search for the child range (cur) that covers safe_point_pos. If we
3262 // don't find it before the children pass safe_point_pos, keep cur at
3263 // the last child, because the next safe_point_pos may be covered by cur.
3264 // This may happen if cur has more than one interval, and the current
3265 // safe_point_pos is in between intervals.
3266 // For that reason, cur may be at most the last child.
3267 DCHECK_NOT_NULL(cur);
3268 DCHECK(safe_point_pos >= cur->Start() || range == cur);
3269 bool found = false;
3270 while (!found) {
3271 if (cur->Covers(safe_point_pos)) {
3272 found = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003273 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003274 LiveRange* next = cur->next();
3275 if (next == nullptr || next->Start() > safe_point_pos) {
3276 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003277 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003278 cur = next;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003279 }
3280 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003281
3282 if (!found) {
3283 continue;
3284 }
3285
3286 // Check if the live range is spilled and the safe point is after
3287 // the spill position.
3288 int spill_index = range->IsSpilledOnlyInDeferredBlocks()
3289 ? cur->Start().ToInstructionIndex()
3290 : range->spill_start_index();
3291
3292 if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
3293 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
3294 range->vreg(), spill_index, safe_point);
3295 map->RecordReference(AllocatedOperand::cast(spill_operand));
3296 }
3297
3298 if (!cur->spilled()) {
3299 TRACE(
3300 "Pointer in register for range %d:%d (start at %d) "
3301 "at safe point %d\n",
3302 range->vreg(), cur->relative_id(), cur->Start().value(),
3303 safe_point);
3304 InstructionOperand operand = cur->GetAssignedOperand();
3305 DCHECK(!operand.IsStackSlot());
3306 DCHECK_EQ(MachineRepresentation::kTagged,
3307 AllocatedOperand::cast(operand).representation());
3308 map->RecordReference(AllocatedOperand::cast(operand));
3309 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003310 }
3311 }
3312}
3313
3314
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003315LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
3316 : data_(data) {}
3317
3318
3319bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3320 const InstructionBlock* block) const {
3321 if (block->PredecessorCount() != 1) return false;
3322 return block->predecessors()[0].IsNext(block->rpo_number());
3323}
3324
3325
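// Inserts moves on control-flow edges wherever a live-in value is assigned a
// different location at the end of a predecessor than at the start of the
// block. Along the way it records which deferred blocks need a spill operand
// and finally commits the spills for ranges spilled only in deferred blocks.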
3326void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003327 // Lazily linearize live ranges in memory for fast lookup.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003328 LiveRangeFinder finder(data(), local_zone);
3329 ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3330 for (const InstructionBlock* block : code()->instruction_blocks()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003331 if (CanEagerlyResolveControlFlow(block)) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003332 BitVector* live = live_in_sets[block->rpo_number().ToInt()];
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003333 BitVector::Iterator iterator(live);
3334 while (!iterator.Done()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003335 LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
3336 for (const RpoNumber& pred : block->predecessors()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003337 FindResult result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003338 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3339 if (!array->FindConnectableSubranges(block, pred_block, &result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003340 continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003341 }
3342 InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3343 InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3344 if (pred_op.Equals(cur_op)) continue;
Ben Murdoch097c5b22016-05-18 11:27:45 +01003345 if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
3346 // We're doing a reload.
3347 // We don't need to, if:
3348 // 1) there's no register use in this block, and
3349 // 2) the range ends before the block does, and
3350 // 3) we don't have a successor, or the successor is spilled.
3351 LifetimePosition block_start =
3352 LifetimePosition::GapFromInstructionIndex(block->code_start());
3353 LifetimePosition block_end =
3354 LifetimePosition::GapFromInstructionIndex(block->code_end());
3355 const LiveRange* current = result.cur_cover_;
3356 const LiveRange* successor = current->next();
3357 if (current->End() < block_end &&
3358 (successor == nullptr || successor->spilled())) {
3359            // Verify point 1: there is no register use. We can scan to the
3360            // end of the range, since it's all within the block.
3361
3362 bool uses_reg = false;
3363 for (const UsePosition* use = current->NextUsePosition(block_start);
3364 use != nullptr; use = use->next()) {
3365 if (use->operand()->IsAnyRegister()) {
3366 uses_reg = true;
3367 break;
3368 }
3369 }
3370 if (!uses_reg) continue;
3371 }
3372 if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3373 pred_block->IsDeferred()) {
3374 // The spill location should be defined in pred_block, so add
3375 // pred_block to the list of blocks requiring a spill operand.
3376 current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
3377 pred_block->rpo_number().ToInt());
3378 }
3379 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003380 int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
3381 USE(move_loc);
3382 DCHECK_IMPLIES(
3383 result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3384 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
3385 code()->GetInstructionBlock(move_loc)->IsDeferred());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003386 }
3387 iterator.Advance();
3388 }
3389 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003390
3391 // At this stage, we collected blocks needing a spill operand from
3392 // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
3393 // deferred blocks.
3394 for (TopLevelLiveRange* top : data()->live_ranges()) {
3395 if (top == nullptr || top->IsEmpty() ||
3396 !top->IsSpilledOnlyInDeferredBlocks())
3397 continue;
3398 CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
3399 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003400}
3401
3402
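// Emits the gap move for a single edge: at the start of |block| when it has a
// single predecessor, otherwise at the end of |pred|, which must then have a
// single successor. Returns the instruction index at which the move was added.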
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003403int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
3404 const InstructionOperand& cur_op,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003405 const InstructionBlock* pred,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003406 const InstructionOperand& pred_op) {
3407 DCHECK(!pred_op.Equals(cur_op));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003408 int gap_index;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003409 Instruction::GapPosition position;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003410 if (block->PredecessorCount() == 1) {
3411 gap_index = block->first_instruction_index();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003412 position = Instruction::START;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003413 } else {
3414 DCHECK(pred->SuccessorCount() == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003415 DCHECK(!code()
3416 ->InstructionAt(pred->last_instruction_index())
3417 ->HasReferenceMap());
3418 gap_index = pred->last_instruction_index();
3419 position = Instruction::END;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003420 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003421 data()->AddGapMove(gap_index, position, pred_op, cur_op);
3422 return gap_index;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003423}
3424
3425
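// Connects adjacent child ranges of the same top-level range: when two
// children touch without a block boundary in between and ended up in different
// locations, a move is inserted between them. Moves that would have to follow
// existing moves at an instruction start are collected in the delayed
// insertion map and committed in a second pass.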
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003426void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
3427 DelayedInsertionMap delayed_insertion_map(local_zone);
3428 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3429 if (top_range == nullptr) continue;
3430 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
3431 LiveRange* first_range = top_range;
3432 for (LiveRange *second_range = first_range->next(); second_range != nullptr;
3433 first_range = second_range, second_range = second_range->next()) {
3434 LifetimePosition pos = second_range->Start();
3435      // Add a gap move if the two live ranges touch and there is no block
3436      // boundary.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003437 if (second_range->spilled()) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003438 if (first_range->End() != pos) continue;
3439 if (data()->IsBlockBoundary(pos) &&
3440 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003441 continue;
3442 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003443 InstructionOperand prev_operand = first_range->GetAssignedOperand();
3444 InstructionOperand cur_operand = second_range->GetAssignedOperand();
3445 if (prev_operand.Equals(cur_operand)) continue;
3446 bool delay_insertion = false;
3447 Instruction::GapPosition gap_pos;
3448 int gap_index = pos.ToInstructionIndex();
Ben Murdoch097c5b22016-05-18 11:27:45 +01003449 if (connect_spilled && !prev_operand.IsAnyRegister() &&
3450 cur_operand.IsAnyRegister()) {
3451 const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
3452 DCHECK(block->IsDeferred());
3453 // Performing a reload in this block, meaning the spill operand must
3454 // be defined here.
3455 top_range->GetListOfBlocksRequiringSpillOperands()->Add(
3456 block->rpo_number().ToInt());
3457 }
3458
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003459 if (pos.IsGapPosition()) {
3460 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003461 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003462 if (pos.IsStart()) {
3463 delay_insertion = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003464 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003465 gap_index++;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003466 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003467 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
3468 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003469      // Reloads or spills for ranges that are spilled only in deferred blocks
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003470      // must happen only in deferred blocks.
3471 DCHECK_IMPLIES(
3472 connect_spilled &&
3473 !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
3474 code()->GetInstructionBlock(gap_index)->IsDeferred());
3475
3476 ParallelMove* move =
3477 code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
3478 gap_pos, code_zone());
3479 if (!delay_insertion) {
3480 move->AddMove(prev_operand, cur_operand);
3481 } else {
3482 delayed_insertion_map.insert(
3483 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003484 }
3485 }
3486 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003487 if (delayed_insertion_map.empty()) return;
3488 // Insert all the moves which should occur after the stored move.
3489 ZoneVector<MoveOperands*> to_insert(local_zone);
3490 ZoneVector<MoveOperands*> to_eliminate(local_zone);
3491 to_insert.reserve(4);
3492 to_eliminate.reserve(4);
3493 ParallelMove* moves = delayed_insertion_map.begin()->first.first;
3494 for (auto it = delayed_insertion_map.begin();; ++it) {
3495 bool done = it == delayed_insertion_map.end();
3496 if (done || it->first.first != moves) {
3497 // Commit the MoveOperands for current ParallelMove.
3498 for (MoveOperands* move : to_eliminate) {
3499 move->Eliminate();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003500 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003501 for (MoveOperands* move : to_insert) {
3502 moves->push_back(move);
3503 }
3504 if (done) break;
3505 // Reset state.
3506 to_eliminate.clear();
3507 to_insert.clear();
3508 moves = it->first.first;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003509 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003510 // Gather all MoveOperands for a single ParallelMove.
3511 MoveOperands* move =
3512 new (code_zone()) MoveOperands(it->first.second, it->second);
3513 MoveOperands* eliminate = moves->PrepareInsertAfter(move);
3514 to_insert.push_back(move);
3515 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003516 }
3517}
3518
3519
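// For a range spilled only in deferred blocks, finds the deferred blocks that
// dominate all locations where the value is needed on the stack and inserts
// the spill moves at the start of those blocks.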
Ben Murdoch097c5b22016-05-18 11:27:45 +01003520void LiveRangeConnector::CommitSpillsInDeferredBlocks(
3521 TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
3522 DCHECK(range->IsSpilledOnlyInDeferredBlocks());
3523 DCHECK(!range->spilled());
3524
3525 InstructionSequence* code = data()->code();
3526 InstructionOperand spill_operand = range->GetSpillRangeOperand();
3527
3528 TRACE("Live Range %d will be spilled only in deferred blocks.\n",
3529 range->vreg());
3530 // If we have ranges that aren't spilled but require the operand on the stack,
3531 // make sure we insert the spill.
3532 for (const LiveRange* child = range; child != nullptr;
3533 child = child->next()) {
3534 for (const UsePosition* pos = child->first_pos(); pos != nullptr;
3535 pos = pos->next()) {
3536 if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
3537 continue;
3538 range->AddBlockRequiringSpillOperand(
3539 code->GetInstructionBlock(pos->pos().ToInstructionIndex())
3540 ->rpo_number());
3541 }
3542 }
3543
3544 ZoneQueue<int> worklist(temp_zone);
3545
3546 for (BitVector::Iterator iterator(
3547 range->GetListOfBlocksRequiringSpillOperands());
3548 !iterator.Done(); iterator.Advance()) {
3549 worklist.push(iterator.Current());
3550 }
3551
3552 // Seek the deferred blocks that dominate locations requiring spill operands,
3553 // and spill there. We only need to spill at the start of such blocks.
3554 BitVector done_blocks(
3555 range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
3556 while (!worklist.empty()) {
3557 int block_id = worklist.front();
3558 worklist.pop();
3559 if (done_blocks.Contains(block_id)) continue;
3560 done_blocks.Add(block_id);
3561 const InstructionBlock* spill_block =
3562 code->InstructionBlockAt(RpoNumber::FromInt(block_id));
3563
3564 for (const RpoNumber& pred : spill_block->predecessors()) {
3565 const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
3566
3567 if (pred_block->IsDeferred()) {
3568 worklist.push(pred_block->rpo_number().ToInt());
3569 } else {
3570 LifetimePosition pred_end =
3571 LifetimePosition::InstructionFromInstructionIndex(
3572 pred_block->last_instruction_index());
3573
3574 LiveRangeBound* bound = array->Find(pred_end);
3575
3576 InstructionOperand pred_op = bound->range_->GetAssignedOperand();
3577
3578 data()->AddGapMove(spill_block->first_instruction_index(),
3579 Instruction::GapPosition::START, pred_op,
3580 spill_operand);
3581 }
3582 }
3583 }
3584}
3585
3586
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003587} // namespace compiler
3588} // namespace internal
3589} // namespace v8