// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)


namespace {

void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
  auto it = std::find(v->begin(), v->end(), range);
  DCHECK(it != v->end());
  v->erase(it);
}

int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
  return kind == DOUBLE_REGISTERS ? cfg->num_double_registers()
                                  : cfg->num_general_registers();
}


int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                RegisterKind kind) {
  return kind == DOUBLE_REGISTERS
             ? cfg->num_allocatable_aliased_double_registers()
             : cfg->num_allocatable_general_registers();
}


const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
                                       RegisterKind kind) {
  return kind == DOUBLE_REGISTERS ? cfg->allocatable_double_codes()
                                  : cfg->allocatable_general_codes();
}


const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}


const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}


Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}


bool IsOutputRegisterOf(Instruction* instr, Register reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsRegister() &&
        LocationOperand::cast(output)->GetRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    InstructionOperand* output = instr->OutputAt(i);
    if (output->IsDoubleRegister() &&
        LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
      return true;
    }
  }
  return false;
}


// TODO(dcarney): fix frame to allow frame accesses to half size location.
int GetByteWidth(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kTagged:
      return kPointerSize;
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kFloat64:
      return 8;
    case MachineRepresentation::kSimd128:
      return 16;
    case MachineRepresentation::kNone:
      break;
  }
  UNREACHABLE();
  return 0;
}

}  // namespace

class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;

 private:
  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
};


struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};


class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}

  bool ShouldInitialize() { return start_ == nullptr; }

  void Initialize(Zone* zone, TopLevelLiveRange* range) {
    length_ = range->GetChildCount();

    start_ = zone->NewArray<LiveRangeBound>(length_);
    LiveRangeBound* curr = start_;
    // Normally, spilled ranges do not need connecting moves, because the spill
    // location has been assigned at definition. For ranges spilled in deferred
    // blocks, that is not the case, so we need to connect the spilled children.
    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
      new (curr) LiveRangeBound(i, i->spilled());
    }
  }

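  // Binary search the children for the bound covering |position|. The bounds
  // are sorted by start position and do not overlap, and |position| is
  // expected to lie inside one of them.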
  LiveRangeBound* Find(const LifetimePosition position) const {
    size_t left_index = 0;
    size_t right_index = length_;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      DCHECK(right_index > current_index);
      LiveRangeBound* bound = &start_[current_index];
      if (bound->start_ <= position) {
        if (position < bound->end_) return bound;
        DCHECK(left_index < current_index);
        left_index = current_index;
      } else {
        right_index = current_index;
      }
    }
  }

  LiveRangeBound* FindPred(const InstructionBlock* pred) {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    return Find(pred_end);
  }

  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
        succ->first_instruction_index());
    return Find(succ_start);
  }

  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    LiveRangeBound* bound = Find(pred_end);
    result->pred_cover_ = bound->range_;
    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());

    if (bound->CanCover(cur_start)) {
      // Both blocks are covered by the same range, so there is nothing to
      // connect.
      return false;
    }
    bound = Find(cur_start);
    if (bound->skip_) {
      return false;
    }
    result->cur_cover_ = bound->range_;
    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
    return (result->cur_cover_ != result->pred_cover_);
  }

 private:
  size_t length_;
  LiveRangeBound* start_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
};

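// Lazily builds and caches one LiveRangeBoundArray per top-level live range;
// ArrayFor initializes an array the first time it is requested.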
class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
      : data_(data),
        bounds_length_(static_cast<int>(data_->live_ranges().size())),
        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
        zone_(zone) {
    for (int i = 0; i < bounds_length_; ++i) {
      new (&bounds_[i]) LiveRangeBoundArray();
    }
  }

  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
    DCHECK(range != nullptr && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(zone_, range);
    }
    return array;
  }

 private:
  const RegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
};


typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;


struct DelayedInsertionMapCompare {
  bool operator()(const DelayedInsertionMapKey& a,
                  const DelayedInsertionMapKey& b) const {
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  }
};


typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
                DelayedInsertionMapCompare> DelayedInsertionMap;


UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         void* hint, UsePositionHintType hint_type)
    : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
  DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kAny;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
    if (unalloc->HasRegisterPolicy()) {
      type = UsePositionType::kRequiresRegister;
    } else if (unalloc->HasSlotPolicy()) {
      type = UsePositionType::kRequiresSlot;
      register_beneficial = false;
    } else {
      register_beneficial = !unalloc->HasAnyPolicy();
    }
  }
  flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
           RegisterBeneficialField::encode(register_beneficial) |
           AssignedRegisterField::encode(kUnassignedRegister);
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  int hint_register;
  return HintRegister(&hint_register);
}

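// Decodes the hint according to its type and, if the hinted location already
// has a register assigned, reports that register's code.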
bool UsePosition::HintRegister(int* register_code) const {
  if (hint_ == nullptr) return false;
  switch (HintTypeField::decode(flags_)) {
    case UsePositionHintType::kNone:
    case UsePositionHintType::kUnresolved:
      return false;
    case UsePositionHintType::kUsePos: {
      UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
      int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kOperand: {
      InstructionOperand* operand =
          reinterpret_cast<InstructionOperand*>(hint_);
      int assigned_register =
          operand->IsRegister()
              ? LocationOperand::cast(operand)->GetRegister().code()
              : LocationOperand::cast(operand)->GetDoubleRegister().code();
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kPhi: {
      RegisterAllocationData::PhiMapValue* phi =
          reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
      int assigned_register = phi->assigned_register();
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
  }
  UNREACHABLE();
  return false;
}


UsePositionHintType UsePosition::HintTypeForOperand(
    const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::CONSTANT:
    case InstructionOperand::IMMEDIATE:
    case InstructionOperand::EXPLICIT:
      return UsePositionHintType::kNone;
    case InstructionOperand::UNALLOCATED:
      return UsePositionHintType::kUnresolved;
    case InstructionOperand::ALLOCATED:
      if (op.IsRegister() || op.IsDoubleRegister()) {
        return UsePositionHintType::kOperand;
      } else {
        DCHECK(op.IsStackSlot() || op.IsDoubleStackSlot());
        return UsePositionHintType::kNone;
      }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
  return UsePositionHintType::kNone;
}


void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}


void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
  DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
  DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
  flags_ = TypeField::encode(type) |
           RegisterBeneficialField::encode(register_beneficial) |
           HintTypeField::encode(HintTypeField::decode(flags_)) |
           AssignedRegisterField::encode(kUnassignedRegister);
}


UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  UseInterval* after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}


void LifetimePosition::Print() const {
  OFStream os(stdout);
  os << *this << std::endl;
}


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
  os << '@' << pos.ToInstructionIndex();
  if (pos.IsGapPosition()) {
    os << 'g';
  } else {
    os << 'i';
  }
  if (pos.IsStart()) {
    os << 's';
  } else {
    os << 'e';
  }
  return os;
}


const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();


LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      splitting_pointer_(nullptr),
      size_(kInvalidSize),
      weight_(kInvalidWeight),
      group_(nullptr) {
  DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          RepresentationField::encode(rep);
}


void LiveRange::VerifyPositions() const {
  // Walk the positions, verifying that each is in an interval.
  UseInterval* interval = first_interval_;
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    CHECK(Start() <= pos->pos());
    CHECK(pos->pos() <= End());
    CHECK_NOT_NULL(interval);
    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
      interval = interval->next();
      CHECK_NOT_NULL(interval);
    }
  }
}


void LiveRange::VerifyIntervals() const {
  DCHECK(first_interval()->start() == Start());
  LifetimePosition last_end = first_interval()->end();
  for (UseInterval* interval = first_interval()->next(); interval != nullptr;
       interval = interval->next()) {
    DCHECK(last_end <= interval->start());
    last_end = interval->end();
  }
  DCHECK(last_end == End());
}


void LiveRange::set_assigned_register(int reg) {
  DCHECK(!HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, reg);
}


void LiveRange::UnsetAssignedRegister() {
  DCHECK(HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


void LiveRange::Spill() {
  DCHECK(!spilled());
  DCHECK(!TopLevel()->HasNoSpillType());
  set_spilled(true);
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


RegisterKind LiveRange::kind() const {
  return IsFloatingPoint(representation()) ? DOUBLE_REGISTERS
                                           : GENERAL_REGISTERS;
}


UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    if (pos->HintRegister(register_index)) return pos;
  }
  return nullptr;
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == nullptr || use_pos->pos() > start) {
    use_pos = first_pos();
  }
  while (use_pos != nullptr && use_pos->pos() < start) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = first_pos();
  UsePosition* prev = nullptr;
  while (pos != nullptr && pos->pos() < start) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
  for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
       pos = pos->next()) {
    if (pos->type() != UsePositionType::kRequiresSlot) continue;
    return pos;
  }
  return nullptr;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == nullptr) return true;
  return use_pos->pos() > pos.NextStart().End();
}


bool LiveRange::IsTopLevel() const { return top_level_ == this; }


InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    return AllocatedOperand(LocationOperand::REGISTER, representation(),
                            assigned_register());
  }
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    InstructionOperand* op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  LifetimePosition start = current_interval_ == nullptr
                               ? LifetimePosition::Invalid()
                               : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}

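// Splits this range at |position|, creating a new child live range that owns
// the part after the split and linking it into the chain right after this one.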
LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
  DetachAt(position, child, zone);

  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;
  return child;
}

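// Moves the part of this range that lies after |position| into |result| and
// returns the last use position left in this range.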
UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
                                 Zone* zone) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK(nullptr != after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). Use at this position should be attributed to
    // the split child because the split child owns the use interval covering
    // it.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  // Invalidate size and weight of this range. The child range has them
  // invalid at construction.
  size_ = kInvalidSize;
  weight_ = kInvalidWeight;
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}


void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
  for (; child != nullptr; child = child->next()) {
    child->top_level_ = new_top_level;
  }
}


void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        DCHECK(op.IsRegister() || op.IsDoubleRegister());
      // Fall through.
      case UsePositionType::kAny:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}


void LiveRange::SetUseHints(int register_index) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        break;
      case UsePositionType::kRequiresRegister:
      case UsePositionType::kAny:
        pos->set_assigned_register(register_index);
        break;
    }
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start() <= position && position < End();
}


bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
  }
  return false;
}

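// Walks both interval chains in parallel and returns the first position where
// this range and |other| are simultaneously live, or an invalid position if
// they never intersect.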
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


unsigned LiveRange::GetSize() {
  if (size_ == kInvalidSize) {
    size_ = 0;
    for (const UseInterval* interval = first_interval(); interval != nullptr;
         interval = interval->next()) {
      size_ += (interval->end().value() - interval->start().value());
    }
  }

  return static_cast<unsigned>(size_);
}


void LiveRange::Print(const RegisterConfiguration* config,
                      bool with_children) const {
  OFStream os(stdout);
  PrintableLiveRange wrapper;
  wrapper.register_configuration_ = config;
  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    wrapper.range_ = i;
    os << wrapper << std::endl;
    if (!with_children) break;
  }
}


void LiveRange::Print(bool with_children) const {
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN);
  Print(config, with_children);
}


struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};


TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}


#if DEBUG
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif


void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                            InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      gap_index, operand, spill_move_insertion_locations_);
}

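// Emits the recorded spill moves into the instruction stream, taking care not
// to duplicate a move that may already exist as a constraint move from a fixed
// output register to a slot.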
void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}


void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}


void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}


AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  SpillRange* spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
}

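// Carves the part covering [start, end) out of this range and appends it to
// the splinter range, reusing DetachAt to move the affected intervals and use
// positions.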
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    DetachAt(start, &splinter_temp, zone);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    UsePosition* last = DetachAt(start, &splinter_temp, zone);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    last_in_splinter = splinter_temp.DetachAt(end, &end_part, zone);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}


void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  splintered_from_ = splinter_parent;
  if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}


void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    merged->spill_range_ = nullptr;
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}

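// Reintegrates a splinter back into this range by interleaving the two child
// chains in start-position order, splitting children where the two overlap.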
void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());
    // If first and second intersect, split first.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);

#if DEBUG
  Verify();
#endif
}


void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}


void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    VerifyChildStructure();
  }
}


void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK(first_interval_ != nullptr);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}


void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    last_interval_ = new_interval;
  }
}


void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions) guarantees
      // that each new use interval either precedes or intersects with
      // last added interval.
      DCHECK(start < first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}

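// Inserts |use_pos| into the use-position list, which is kept sorted by
// lifetime position, and updates the cached hint position if no earlier use
// carries a hint.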
void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}

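// Returns true if the two sorted interval chains share at least one position.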
static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                        UseInterval* interval2) {
  while (interval1 != nullptr && interval2 != nullptr) {
    if (interval1->start() < interval2->start()) {
      if (interval1->end() > interval2->start()) {
        return true;
      }
      interval1 = interval1->next();
    } else {
      if (interval2->end() > interval1->start()) {
        return true;
      }
      interval2 = interval2->next();
    }
  }
  return false;
}


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range) {
  const LiveRange* range = printable_range.range_;
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
     << " ";
  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;
  UseInterval* interval = range->first_interval();
  UsePosition* use_pos = range->first_pos();
  PrintableInstructionOperand pio;
  pio.register_configuration_ = printable_range.register_configuration_;
  while (use_pos != nullptr) {
    if (use_pos->HasOperand()) {
      pio.op_ = *use_pos->operand();
      os << pio << use_pos->pos() << " ";
    }
    use_pos = use_pos->next();
  }
  os << std::endl;

  while (interval != nullptr) {
    os << '[' << interval->start() << ", " << interval->end() << ')'
       << std::endl;
    interval = interval->next();
  }
  os << "}";
  return os;
}


SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())),
      kind_(parent->kind()) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;
  UseInterval* node = nullptr;
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  end_position_ = node->end();
  parent->SetSpillRange(this);
}


int SpillRange::ByteWidth() const {
  return GetByteWidth(live_ranges_[0]->representation());
}


bool SpillRange::IsIntersectingWith(SpillRange* other) const {
  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
      this->End() <= other->use_interval_->start() ||
      other->End() <= this->use_interval_->start()) {
    return false;
  }
  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
}

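// Attempts to fold |other| into this spill range so both can share one stack
// slot; merging fails if either range already has a slot, the kinds differ, or
// the ranges overlap in time.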
bool SpillRange::TryMerge(SpillRange* other) {
  if (HasSlot() || other->HasSlot()) return false;
  // TODO(dcarney): byte widths should be compared here not kinds.
  if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() ||
      IsIntersectingWith(other)) {
    return false;
  }

  LifetimePosition max = LifetimePosition::MaxPosition();
  if (End() < other->End() && other->End() != max) {
    end_position_ = other->End();
  }
  other->end_position_ = max;

  MergeDisjointIntervals(other->use_interval_);
  other->use_interval_ = nullptr;

  for (TopLevelLiveRange* range : other->live_ranges()) {
    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);
  }

  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
                       other->live_ranges().end());
  other->live_ranges().clear();

  return true;
}

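// Merges the already-disjoint, sorted interval list |other| into
// use_interval_, preserving the order by start position.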
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001331void SpillRange::MergeDisjointIntervals(UseInterval* other) {
1332 UseInterval* tail = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001333 UseInterval* current = use_interval_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001334 while (other != nullptr) {
1335 // Make sure the 'current' list starts first
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336 if (current == nullptr || current->start() > other->start()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001337 std::swap(current, other);
1338 }
1339 // Check disjointness
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001340 DCHECK(other == nullptr || current->end() <= other->start());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001341 // Append the 'current' node to the result accumulator and move forward
1342 if (tail == nullptr) {
1343 use_interval_ = current;
1344 } else {
1345 tail->set_next(current);
1346 }
1347 tail = current;
1348 current = current->next();
1349 }
1350 // Other list is empty => we are done
1351}
1352
1353
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001354void SpillRange::Print() const {
1355 OFStream os(stdout);
1356 os << "{" << std::endl;
1357 for (TopLevelLiveRange* range : live_ranges()) {
1358 os << range->vreg() << " ";
1359 }
1360 os << std::endl;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001361
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001362 for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
1363 os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
1364 }
1365 os << "}" << std::endl;
1366}
1367
1368
1369RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
1370 const InstructionBlock* block,
1371 Zone* zone)
1372 : phi_(phi),
1373 block_(block),
1374 incoming_operands_(zone),
1375 assigned_register_(kUnassignedRegister) {
1376 incoming_operands_.reserve(phi->operands().size());
1377}
1378
1379
1380void RegisterAllocationData::PhiMapValue::AddOperand(
1381 InstructionOperand* operand) {
1382 incoming_operands_.push_back(operand);
1383}
1384
1385
1386void RegisterAllocationData::PhiMapValue::CommitAssignment(
1387 const InstructionOperand& assigned) {
1388 for (InstructionOperand* operand : incoming_operands_) {
1389 InstructionOperand::ReplaceWith(operand, &assigned);
1390 }
1391}
1392
1393
1394RegisterAllocationData::RegisterAllocationData(
1395 const RegisterConfiguration* config, Zone* zone, Frame* frame,
1396 InstructionSequence* code, const char* debug_name)
1397 : allocation_zone_(zone),
1398 frame_(frame),
1399 code_(code),
1400 debug_name_(debug_name),
1401 config_(config),
1402 phi_map_(allocation_zone()),
1403 allocatable_codes_(this->config()->num_general_registers(), -1,
1404 allocation_zone()),
1405 allocatable_double_codes_(this->config()->num_double_registers(), -1,
1406 allocation_zone()),
1407 live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1408 live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1409 live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
1410 allocation_zone()),
1411 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
1412 allocation_zone()),
1413 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
1414 allocation_zone()),
1415 spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
1416 delayed_references_(allocation_zone()),
1417 assigned_registers_(nullptr),
1418 assigned_double_registers_(nullptr),
1419 virtual_register_count_(code->VirtualRegisterCount()),
1420 preassigned_slot_ranges_(zone) {
1421 DCHECK(this->config()->num_general_registers() <=
1422 RegisterConfiguration::kMaxGeneralRegisters);
1423 DCHECK(this->config()->num_double_registers() <=
1424 RegisterConfiguration::kMaxDoubleRegisters);
1425 assigned_registers_ = new (code_zone())
1426 BitVector(this->config()->num_general_registers(), code_zone());
1427 assigned_double_registers_ = new (code_zone())
1428 BitVector(this->config()->num_double_registers(), code_zone());
1429 this->frame()->SetAllocatedRegisters(assigned_registers_);
1430 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1431}
1432
1433
1434MoveOperands* RegisterAllocationData::AddGapMove(
1435 int index, Instruction::GapPosition position,
1436 const InstructionOperand& from, const InstructionOperand& to) {
1437 Instruction* instr = code()->InstructionAt(index);
1438 ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1439 return moves->AddMove(from, to);
1440}
1441
1442
1443MachineRepresentation RegisterAllocationData::RepresentationFor(
1444 int virtual_register) {
1445 DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1446 return code()->GetRepresentation(virtual_register);
1447}
1448
1449
1450TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1451 if (index >= static_cast<int>(live_ranges().size())) {
1452 live_ranges().resize(index + 1, nullptr);
1453 }
1454 TopLevelLiveRange* result = live_ranges()[index];
1455 if (result == nullptr) {
1456 result = NewLiveRange(index, RepresentationFor(index));
1457 live_ranges()[index] = result;
1458 }
1459 return result;
1460}
1461
1462
1463TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1464 int index, MachineRepresentation rep) {
1465 return new (allocation_zone()) TopLevelLiveRange(index, rep);
1466}
1467
1468
1469int RegisterAllocationData::GetNextLiveRangeId() {
1470 int vreg = virtual_register_count_++;
1471 if (vreg >= static_cast<int>(live_ranges().size())) {
1472 live_ranges().resize(vreg + 1, nullptr);
1473 }
1474 return vreg;
1475}
1476
1477
1478TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1479 MachineRepresentation rep) {
1480 int vreg = GetNextLiveRangeId();
1481 TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1482 return ret;
1483}
1484
1485
1486RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1487 const InstructionBlock* block, PhiInstruction* phi) {
1488 RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1489 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1490 auto res =
1491 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1492 DCHECK(res.second);
1493 USE(res);
1494 return map_value;
1495}
1496
1497
1498RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1499 int virtual_register) {
1500 auto it = phi_map_.find(virtual_register);
1501 DCHECK(it != phi_map_.end());
1502 return it->second;
1503}
1504
1505
1506RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1507 TopLevelLiveRange* top_range) {
1508 return GetPhiMapValueFor(top_range->vreg());
1509}
1510
1511
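// Sanity check: any virtual register that is still live on entry to the first
// block has a use without a definition. Each offender is reported via PrintF,
// and the function returns whether at least one was found.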
1512bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1513 bool found = false;
1514 BitVector::Iterator iterator(live_in_sets()[0]);
1515 while (!iterator.Done()) {
1516 found = true;
1517 int operand_index = iterator.Current();
1518 PrintF("Register allocator error: live v%d reached first block.\n",
1519 operand_index);
1520 LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1521 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1522 if (debug_name() == nullptr) {
1523 PrintF("\n");
1524 } else {
1525 PrintF(" (function: %s)\n", debug_name());
1526 }
1527 iterator.Advance();
1528 }
1529 return found;
1530}
1531
1532
1533// If a range is defined in a deferred block, we expect the entire range to
1534// cover only positions in deferred blocks. Otherwise, a block on the hot
1535// path would be dominated by a deferred block, meaning it is unreachable
1536// without passing through the deferred block, which is contradictory.
1537// In particular, when such a range contributes a result back to the hot
1538// path, it does so as one of the inputs of a phi. In that case, the value
1539// is transferred via a move in the Instruction::END gap of the last
1540// instruction of a deferred block.
1541bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1542 for (const TopLevelLiveRange* range : live_ranges()) {
1543 if (range == nullptr || range->IsEmpty() ||
1544 !code()
1545 ->GetInstructionBlock(range->Start().ToInstructionIndex())
1546 ->IsDeferred()) {
1547 continue;
1548 }
1549 for (const UseInterval* i = range->first_interval(); i != nullptr;
1550 i = i->next()) {
1551 int first = i->FirstGapIndex();
1552 int last = i->LastGapIndex();
1553 for (int instr = first; instr <= last;) {
1554 const InstructionBlock* block = code()->GetInstructionBlock(instr);
1555 if (!block->IsDeferred()) return false;
1556 instr = block->last_instruction_index() + 1;
1557       }
1558 }
1559 }
1560   return true;
1561}
1562
1563SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1564 TopLevelLiveRange* range) {
1565 DCHECK(!range->HasSpillOperand());
1566
1567 SpillRange* spill_range = range->GetAllocatedSpillRange();
1568 if (spill_range == nullptr) {
1569 DCHECK(!range->IsSplinter());
1570 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1571   }
1572   range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
1573
1574   int spill_range_index =
1575       range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
1576
1577   spill_ranges()[spill_range_index] = spill_range;
1578
1579   return spill_range;
1580}
1581
1582
1583SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1584 TopLevelLiveRange* range) {
1585 DCHECK(!range->HasSpillOperand());
1586 DCHECK(!range->IsSplinter());
1587 SpillRange* spill_range =
1588 new (allocation_zone()) SpillRange(range, allocation_zone());
1589 return spill_range;
1590}
1591
1592
1593void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
1594 if (kind == DOUBLE_REGISTERS) {
1595 assigned_double_registers_->Add(index);
1596 } else {
1597 DCHECK(kind == GENERAL_REGISTERS);
1598 assigned_registers_->Add(index);
1599 }
1600}
1601
1602
1603bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1604 return pos.IsFullStart() &&
1605 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1606 pos.ToInstructionIndex();
1607}
1608
1609
1610ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1611 : data_(data) {}
1612
1613
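// Rewrites an operand with a fixed policy into the concrete allocated operand
// (stack slot, general register, or double register). For tagged values the
// resulting location is also recorded in the instruction's reference map, if
// the instruction has one.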
1614InstructionOperand* ConstraintBuilder::AllocateFixed(
1615 UnallocatedOperand* operand, int pos, bool is_tagged) {
1616 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1617 DCHECK(operand->HasFixedPolicy());
1618 InstructionOperand allocated;
1619 MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1620 int virtual_register = operand->virtual_register();
1621 if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1622 rep = data()->RepresentationFor(virtual_register);
1623 }
1624 if (operand->HasFixedSlotPolicy()) {
1625 allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1626 operand->fixed_slot_index());
1627 } else if (operand->HasFixedRegisterPolicy()) {
1628 DCHECK(!IsFloatingPoint(rep));
1629 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1630 operand->fixed_register_index());
1631 } else if (operand->HasFixedDoubleRegisterPolicy()) {
1632 DCHECK(IsFloatingPoint(rep));
1633 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1634 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1635 operand->fixed_register_index());
1636 } else {
1637 UNREACHABLE();
1638 }
1639 InstructionOperand::ReplaceWith(operand, &allocated);
1640 if (is_tagged) {
1641 TRACE("Fixed reg is tagged at %d\n", pos);
1642 Instruction* instr = code()->InstructionAt(pos);
1643 if (instr->HasReferenceMap()) {
1644 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1645 }
1646 }
1647 return operand;
1648}
1649
1650
1651void ConstraintBuilder::MeetRegisterConstraints() {
1652 for (InstructionBlock* block : code()->instruction_blocks()) {
1653 MeetRegisterConstraints(block);
1654 }
1655}
1656
1657
1658void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1659 int start = block->first_instruction_index();
1660 int end = block->last_instruction_index();
1661 DCHECK_NE(-1, start);
1662 for (int i = start; i <= end; ++i) {
1663 MeetConstraintsBefore(i);
1664 if (i != end) MeetConstraintsAfter(i);
1665 }
1666   // Meet register constraints for the last instruction in the block.
1667 MeetRegisterConstraintsForLastInstructionInBlock(block);
1668}
1669
1670
1671void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1672 const InstructionBlock* block) {
1673 int end = block->last_instruction_index();
1674 Instruction* last_instruction = code()->InstructionAt(end);
1675 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1676 InstructionOperand* output_operand = last_instruction->OutputAt(i);
1677 DCHECK(!output_operand->IsConstant());
1678 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1679 int output_vreg = output->virtual_register();
1680 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1681 bool assigned = false;
1682 if (output->HasFixedPolicy()) {
1683 AllocateFixed(output, -1, false);
1684       // This value is produced on the stack; we never need to spill it.
1685 if (output->IsStackSlot()) {
1686 DCHECK(LocationOperand::cast(output)->index() <
1687 data()->frame()->GetSpillSlotCount());
1688 range->SetSpillOperand(LocationOperand::cast(output));
1689 range->SetSpillStartIndex(end);
1690 assigned = true;
1691 }
1692
1693 for (const RpoNumber& succ : block->successors()) {
1694 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1695 DCHECK(successor->PredecessorCount() == 1);
1696 int gap_index = successor->first_instruction_index();
1697 // Create an unconstrained operand for the same virtual register
1698 // and insert a gap move from the fixed output to the operand.
1699 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1700 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1701 }
1702 }
1703
1704 if (!assigned) {
1705 for (const RpoNumber& succ : block->successors()) {
1706 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1707 DCHECK(successor->PredecessorCount() == 1);
1708 int gap_index = successor->first_instruction_index();
1709 range->RecordSpillLocation(allocation_zone(), gap_index, output);
1710 range->SetSpillStartIndex(gap_index);
1711 }
1712 }
1713 }
1714}
1715
1716
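// Handles constraints that take effect after the instruction at |instr_index|:
// fixed temps are pinned, constant outputs become the spill operand of their
// range, and fixed outputs get a gap move to an unconstrained copy in the
// START gap of the following instruction.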
1717void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1718 Instruction* first = code()->InstructionAt(instr_index);
1719 // Handle fixed temporaries.
1720 for (size_t i = 0; i < first->TempCount(); i++) {
1721 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1722 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1723 }
1724 // Handle constant/fixed output operands.
1725 for (size_t i = 0; i < first->OutputCount(); i++) {
1726 InstructionOperand* output = first->OutputAt(i);
1727 if (output->IsConstant()) {
1728 int output_vreg = ConstantOperand::cast(output)->virtual_register();
1729 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1730 range->SetSpillStartIndex(instr_index + 1);
1731 range->SetSpillOperand(output);
1732 continue;
1733 }
1734 UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1735 TopLevelLiveRange* range =
1736 data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1737 bool assigned = false;
1738 if (first_output->HasFixedPolicy()) {
1739 int output_vreg = first_output->virtual_register();
1740 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1741 bool is_tagged = code()->IsReference(output_vreg);
1742 if (first_output->HasSecondaryStorage()) {
1743 range->MarkHasPreassignedSlot();
1744 data()->preassigned_slot_ranges().push_back(
1745 std::make_pair(range, first_output->GetSecondaryStorage()));
1746 }
1747 AllocateFixed(first_output, instr_index, is_tagged);
1748
1749 // This value is produced on the stack, we never need to spill it.
1750 if (first_output->IsStackSlot()) {
1751 DCHECK(LocationOperand::cast(first_output)->index() <
1752 data()->frame()->GetTotalFrameSlotCount());
1753 range->SetSpillOperand(LocationOperand::cast(first_output));
1754 range->SetSpillStartIndex(instr_index + 1);
1755 assigned = true;
1756 }
1757 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1758 output_copy);
1759 }
1760 // Make sure we add a gap move for spilling (if we have not done
1761 // so already).
1762 if (!assigned) {
1763 range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1764 first_output);
1765 range->SetSpillStartIndex(instr_index + 1);
1766 }
1767 }
1768}
1769
1770
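// Handles constraints that take effect before the instruction at |instr_index|:
// fixed inputs are pinned and fed via a gap move from an unconstrained copy,
// and "output same as input" constraints are satisfied by renaming the input
// and inserting a corresponding gap move in the instruction's END gap position.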
1771void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1772 Instruction* second = code()->InstructionAt(instr_index);
1773 // Handle fixed input operands of second instruction.
1774 for (size_t i = 0; i < second->InputCount(); i++) {
1775 InstructionOperand* input = second->InputAt(i);
1776 if (input->IsImmediate() || input->IsExplicit()) {
1777 continue; // Ignore immediates and explicitly reserved registers.
1778 }
1779 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1780 if (cur_input->HasFixedPolicy()) {
1781 int input_vreg = cur_input->virtual_register();
1782 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1783 bool is_tagged = code()->IsReference(input_vreg);
1784 AllocateFixed(cur_input, instr_index, is_tagged);
1785 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1786 }
1787 }
1788 // Handle "output same as input" for second instruction.
1789 for (size_t i = 0; i < second->OutputCount(); i++) {
1790 InstructionOperand* output = second->OutputAt(i);
1791 if (!output->IsUnallocated()) continue;
1792 UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1793 if (!second_output->HasSameAsInputPolicy()) continue;
1794 DCHECK(i == 0); // Only valid for first output.
1795 UnallocatedOperand* cur_input =
1796 UnallocatedOperand::cast(second->InputAt(0));
1797 int output_vreg = second_output->virtual_register();
1798 int input_vreg = cur_input->virtual_register();
1799 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1800 cur_input->set_virtual_register(second_output->virtual_register());
1801 MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1802 input_copy, *cur_input);
1803 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1804 if (second->HasReferenceMap()) {
1805 RegisterAllocationData::DelayedReference delayed_reference = {
1806 second->reference_map(), &gap_move->source()};
1807 data()->delayed_references().push_back(delayed_reference);
1808 }
1809 } else if (!code()->IsReference(input_vreg) &&
1810 code()->IsReference(output_vreg)) {
1811       // The input is assumed to have a tagged representation from the very
1812       // start of the instruction, before the pointer map can be used. I.e.
1813       // the pointer map at the instruction will include the output operand
1814       // (whose value at the beginning of the instruction is equal to the
1815       // input operand). If this is not desired, then the pointer map at this
1816       // instruction needs to be adjusted manually.
1817 }
1818 }
1819}
1820
1821
1822void ConstraintBuilder::ResolvePhis() {
1823 // Process the blocks in reverse order.
1824 for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1825 ResolvePhis(block);
1826 }
1827}
1828
1829
1830void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1831 for (PhiInstruction* phi : block->phis()) {
1832 int phi_vreg = phi->virtual_register();
1833 RegisterAllocationData::PhiMapValue* map_value =
1834 data()->InitializePhiMap(block, phi);
1835 InstructionOperand& output = phi->output();
1836 // Map the destination operands, so the commitment phase can find them.
1837 for (size_t i = 0; i < phi->operands().size(); ++i) {
1838 InstructionBlock* cur_block =
1839 code()->InstructionBlockAt(block->predecessors()[i]);
1840 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1841 MoveOperands* move = data()->AddGapMove(
1842 cur_block->last_instruction_index(), Instruction::END, input, output);
1843 map_value->AddOperand(&move->destination());
1844 DCHECK(!code()
1845 ->InstructionAt(cur_block->last_instruction_index())
1846 ->HasReferenceMap());
1847 }
1848 TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1849 int gap_index = block->first_instruction_index();
1850 live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1851 live_range->SetSpillStartIndex(gap_index);
1852 // We use the phi-ness of some nodes in some later heuristics.
1853 live_range->set_is_phi(true);
1854 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1855 }
1856}
1857
1858
1859LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1860 Zone* local_zone)
1861 : data_(data), phi_hints_(local_zone) {}
1862
1863
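// Returns (and caches in live_out_sets()) the set of virtual registers live on
// exit from |block|, built from the live-in sets of forward successors plus
// the phi inputs flowing along the corresponding edges.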
1864BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1865 RegisterAllocationData* data) {
1866 size_t block_index = block->rpo_number().ToSize();
1867 BitVector* live_out = data->live_out_sets()[block_index];
1868 if (live_out == nullptr) {
1869     // Compute live out for the given block, not taking backward successor
1870     // edges into account.
1871 Zone* zone = data->allocation_zone();
1872 const InstructionSequence* code = data->code();
1873
1874 live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1875
1876 // Process all successor blocks.
1877 for (const RpoNumber& succ : block->successors()) {
1878 // Add values live on entry to the successor.
1879 if (succ <= block->rpo_number()) continue;
1880 BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1881 if (live_in != nullptr) live_out->Union(*live_in);
1882
1883 // All phi input operands corresponding to this successor edge are live
1884 // out from this block.
1885 const InstructionBlock* successor = code->InstructionBlockAt(succ);
1886 size_t index = successor->PredecessorIndexOf(block->rpo_number());
1887 DCHECK(index < successor->PredecessorCount());
1888 for (PhiInstruction* phi : successor->phis()) {
1889 live_out->Add(phi->operands()[index]);
1890 }
1891 }
1892 data->live_out_sets()[block_index] = live_out;
1893 }
1894 return live_out;
1895}
1896
1897
1898void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1899 BitVector* live_out) {
1900 // Add an interval that includes the entire block to the live range for
1901 // each live_out value.
1902 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1903 block->first_instruction_index());
1904 LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1905 block->last_instruction_index())
1906 .NextStart();
1907 BitVector::Iterator iterator(live_out);
1908 while (!iterator.Done()) {
1909 int operand_index = iterator.Current();
1910 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1911 range->AddUseInterval(start, end, allocation_zone());
1912 iterator.Advance();
1913 }
1914}
1915
1916
1917int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
1918 return -index - 1 - config()->num_general_registers();
1919}
1920
1921
1922TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1923 DCHECK(index < config()->num_general_registers());
1924 TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
1925 if (result == nullptr) {
1926 result = data()->NewLiveRange(FixedLiveRangeID(index),
1927 InstructionSequence::DefaultRepresentation());
1928 DCHECK(result->IsFixed());
1929 result->set_assigned_register(index);
1930 data()->MarkAllocated(GENERAL_REGISTERS, index);
1931 data()->fixed_live_ranges()[index] = result;
1932 }
1933 return result;
1934}
1935
1936
1937TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
1938 DCHECK(index < config()->num_double_registers());
1939 TopLevelLiveRange* result = data()->fixed_double_live_ranges()[index];
1940 if (result == nullptr) {
1941 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index),
1942 MachineRepresentation::kFloat64);
1943 DCHECK(result->IsFixed());
1944 result->set_assigned_register(index);
1945 data()->MarkAllocated(DOUBLE_REGISTERS, index);
1946 data()->fixed_double_live_ranges()[index] = result;
1947 }
1948 return result;
1949}
1950
1951
1952TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
1953 if (operand->IsUnallocated()) {
1954 return data()->GetOrCreateLiveRangeFor(
1955 UnallocatedOperand::cast(operand)->virtual_register());
1956 } else if (operand->IsConstant()) {
1957 return data()->GetOrCreateLiveRangeFor(
1958 ConstantOperand::cast(operand)->virtual_register());
1959 } else if (operand->IsRegister()) {
1960 return FixedLiveRangeFor(
1961 LocationOperand::cast(operand)->GetRegister().code());
1962 } else if (operand->IsDoubleRegister()) {
1963 return FixedDoubleLiveRangeFor(
1964 LocationOperand::cast(operand)->GetDoubleRegister().code());
1965 } else {
1966 return nullptr;
1967 }
1968}
1969
1970
1971UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
1972 InstructionOperand* operand,
1973 void* hint,
1974 UsePositionHintType hint_type) {
1975 return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
1976}
1977
1978
1979UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
1980 InstructionOperand* operand, void* hint,
1981 UsePositionHintType hint_type) {
1982 TopLevelLiveRange* range = LiveRangeFor(operand);
1983 if (range == nullptr) return nullptr;
1984
1985 if (range->IsEmpty() || range->Start() > position) {
1986     // This can happen if there is a definition without a use.
1987 range->AddUseInterval(position, position.NextStart(), allocation_zone());
1988 range->AddUsePosition(NewUsePosition(position.NextStart()));
1989 } else {
1990 range->ShortenTo(position);
1991 }
1992 if (!operand->IsUnallocated()) return nullptr;
1993 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
1994 UsePosition* use_pos =
1995 NewUsePosition(position, unalloc_operand, hint, hint_type);
1996 range->AddUsePosition(use_pos);
1997 return use_pos;
1998}
1999
2000
2001UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
2002 LifetimePosition position,
2003 InstructionOperand* operand, void* hint,
2004 UsePositionHintType hint_type) {
2005 TopLevelLiveRange* range = LiveRangeFor(operand);
2006 if (range == nullptr) return nullptr;
2007 UsePosition* use_pos = nullptr;
2008 if (operand->IsUnallocated()) {
2009 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2010 use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2011 range->AddUsePosition(use_pos);
2012 }
2013 range->AddUseInterval(block_start, position, allocation_zone());
2014 return use_pos;
2015}
2016
2017
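// Walks the block's instructions in reverse order: outputs kill their values
// (Define), inputs and temps extend liveness (Use), instructions that clobber
// registers add blocking intervals on the fixed live ranges, and gap moves are
// processed so that their sources stay live and pick up allocation hints.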
2018void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2019 BitVector* live) {
2020 int block_start = block->first_instruction_index();
2021 LifetimePosition block_start_position =
2022 LifetimePosition::GapFromInstructionIndex(block_start);
2023
2024 for (int index = block->last_instruction_index(); index >= block_start;
2025 index--) {
2026 LifetimePosition curr_position =
2027 LifetimePosition::InstructionFromInstructionIndex(index);
2028 Instruction* instr = code()->InstructionAt(index);
2029 DCHECK(instr != nullptr);
2030 DCHECK(curr_position.IsInstructionPosition());
2031 // Process output, inputs, and temps of this instruction.
2032 for (size_t i = 0; i < instr->OutputCount(); i++) {
2033 InstructionOperand* output = instr->OutputAt(i);
2034 if (output->IsUnallocated()) {
2035 // Unsupported.
2036 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2037 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2038 live->Remove(out_vreg);
2039 } else if (output->IsConstant()) {
2040 int out_vreg = ConstantOperand::cast(output)->virtual_register();
2041 live->Remove(out_vreg);
2042 }
2043 if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2044 output->IsRegister() &&
2045 AllocatedOperand::cast(output)->GetRegister().is(
2046 v8::internal::kReturnRegister0)) {
2047 // The register defined here is blocked from gap start - it is the
2048 // exception value.
2049 // TODO(mtrofin): should we explore an explicit opcode for
2050 // the first instruction in the handler?
2051 Define(LifetimePosition::GapFromInstructionIndex(index), output);
2052 } else {
2053 Define(curr_position, output);
2054 }
2055 }
2056
2057 if (instr->ClobbersRegisters()) {
2058 for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2059 int code = config()->GetAllocatableGeneralCode(i);
2060 if (!IsOutputRegisterOf(instr, Register::from_code(code))) {
2061 TopLevelLiveRange* range = FixedLiveRangeFor(code);
2062 range->AddUseInterval(curr_position, curr_position.End(),
2063 allocation_zone());
2064 }
2065 }
2066 }
2067
2068 if (instr->ClobbersDoubleRegisters()) {
2069 for (int i = 0; i < config()->num_allocatable_aliased_double_registers();
2070 ++i) {
2071 int code = config()->GetAllocatableDoubleCode(i);
2072 if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) {
2073 TopLevelLiveRange* range = FixedDoubleLiveRangeFor(code);
2074 range->AddUseInterval(curr_position, curr_position.End(),
2075 allocation_zone());
2076 }
2077 }
2078 }
2079
2080 for (size_t i = 0; i < instr->InputCount(); i++) {
2081 InstructionOperand* input = instr->InputAt(i);
2082 if (input->IsImmediate() || input->IsExplicit()) {
2083 continue; // Ignore immediates and explicitly reserved registers.
2084 }
2085 LifetimePosition use_pos;
2086 if (input->IsUnallocated() &&
2087 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2088 use_pos = curr_position;
2089 } else {
2090 use_pos = curr_position.End();
2091 }
2092
2093 if (input->IsUnallocated()) {
2094 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2095 int vreg = unalloc->virtual_register();
2096 live->Add(vreg);
2097 if (unalloc->HasSlotPolicy()) {
2098 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2099 }
2100 }
2101 Use(block_start_position, use_pos, input);
2102 }
2103
2104 for (size_t i = 0; i < instr->TempCount(); i++) {
2105 InstructionOperand* temp = instr->TempAt(i);
2106 // Unsupported.
2107 DCHECK_IMPLIES(temp->IsUnallocated(),
2108 !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2109 if (instr->ClobbersTemps()) {
2110 if (temp->IsRegister()) continue;
2111 if (temp->IsUnallocated()) {
2112 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2113 if (temp_unalloc->HasFixedPolicy()) {
2114 continue;
2115 }
2116 }
2117 }
2118 Use(block_start_position, curr_position.End(), temp);
2119 Define(curr_position, temp);
2120 }
2121
2122 // Process the moves of the instruction's gaps, making their sources live.
2123 const Instruction::GapPosition kPositions[] = {Instruction::END,
2124 Instruction::START};
2125 curr_position = curr_position.PrevStart();
2126 DCHECK(curr_position.IsGapPosition());
2127 for (const Instruction::GapPosition& position : kPositions) {
2128 ParallelMove* move = instr->GetParallelMove(position);
2129 if (move == nullptr) continue;
2130 if (position == Instruction::END) {
2131 curr_position = curr_position.End();
2132 } else {
2133 curr_position = curr_position.Start();
2134 }
2135 for (MoveOperands* cur : *move) {
2136 InstructionOperand& from = cur->source();
2137 InstructionOperand& to = cur->destination();
2138 void* hint = &to;
2139 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2140 UsePosition* to_use = nullptr;
2141 int phi_vreg = -1;
2142 if (to.IsUnallocated()) {
2143 int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2144 TopLevelLiveRange* to_range =
2145 data()->GetOrCreateLiveRangeFor(to_vreg);
2146 if (to_range->is_phi()) {
2147 phi_vreg = to_vreg;
2148 if (to_range->is_non_loop_phi()) {
2149 hint = to_range->current_hint_position();
2150 hint_type = hint == nullptr ? UsePositionHintType::kNone
2151 : UsePositionHintType::kUsePos;
2152 } else {
2153 hint_type = UsePositionHintType::kPhi;
2154 hint = data()->GetPhiMapValueFor(to_vreg);
2155 }
2156 } else {
2157 if (live->Contains(to_vreg)) {
2158 to_use = Define(curr_position, &to, &from,
2159 UsePosition::HintTypeForOperand(from));
2160 live->Remove(to_vreg);
2161 } else {
2162 cur->Eliminate();
2163 continue;
2164 }
2165 }
2166 } else {
2167 Define(curr_position, &to);
2168 }
2169 UsePosition* from_use =
2170 Use(block_start_position, curr_position, &from, hint, hint_type);
2171 // Mark range live.
2172 if (from.IsUnallocated()) {
2173 live->Add(UnallocatedOperand::cast(from).virtual_register());
2174 }
2175 // Resolve use position hints just created.
2176 if (to_use != nullptr && from_use != nullptr) {
2177 to_use->ResolveHint(from_use);
2178 from_use->ResolveHint(to_use);
2179 }
2180 DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2181 DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2182 // Potentially resolve phi hint.
2183 if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2184 }
2185 }
2186 }
2187}
2188
2189
2190void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2191 BitVector* live) {
2192 for (PhiInstruction* phi : block->phis()) {
2193 // The live range interval already ends at the first instruction of the
2194 // block.
2195 int phi_vreg = phi->virtual_register();
2196 live->Remove(phi_vreg);
2197 InstructionOperand* hint = nullptr;
2198     const InstructionBlock::Predecessors& predecessors = block->predecessors();
2199 const InstructionBlock* predecessor_block =
2200 code()->InstructionBlockAt(predecessors[0]);
2201 const Instruction* instr = GetLastInstruction(code(), predecessor_block);
2202 if (predecessor_block->IsDeferred()) {
2203 // "Prefer the hint from the first non-deferred predecessor, if any.
2204 for (size_t i = 1; i < predecessors.size(); ++i) {
2205 predecessor_block = code()->InstructionBlockAt(predecessors[i]);
2206 if (!predecessor_block->IsDeferred()) {
2207 instr = GetLastInstruction(code(), predecessor_block);
2208 break;
2209 }
2210 }
2211 }
2212 DCHECK_NOT_NULL(instr);
2213
2214     for (MoveOperands* move : *instr->GetParallelMove(Instruction::END)) {
2215 InstructionOperand& to = move->destination();
2216 if (to.IsUnallocated() &&
2217 UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2218 hint = &move->source();
2219 break;
2220 }
2221 }
2222 DCHECK(hint != nullptr);
2223 LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2224 block->first_instruction_index());
2225 UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2226 UsePosition::HintTypeForOperand(*hint));
2227 MapPhiHint(hint, use_pos);
2228 }
2229}
2230
2231
2232void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2233 BitVector* live) {
2234 DCHECK(block->IsLoopHeader());
2235 // Add a live range stretching from the first loop instruction to the last
2236 // for each value live on entry to the header.
2237 BitVector::Iterator iterator(live);
2238 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2239 block->first_instruction_index());
2240 LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2241 code()->LastLoopInstructionIndex(block))
2242 .NextFullStart();
2243 while (!iterator.Done()) {
2244 int operand_index = iterator.Current();
2245 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2246 range->EnsureInterval(start, end, allocation_zone());
2247 iterator.Advance();
2248 }
2249 // Insert all values into the live in sets of all blocks in the loop.
2250 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2251 ++i) {
2252 live_in_sets()[i]->Union(*live);
2253 }
2254}
2255
2256
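// Drives live range construction: blocks are processed in reverse order,
// each starting from its live-out set; afterwards spill ranges are assigned
// to ranges with slot uses and preassigned spill slots are applied.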
2257void LiveRangeBuilder::BuildLiveRanges() {
2258 // Process the blocks in reverse order.
2259 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2260 --block_id) {
2261 InstructionBlock* block =
2262 code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2263 BitVector* live = ComputeLiveOut(block, data());
2264 // Initially consider all live_out values live for the entire block. We
2265 // will shorten these intervals if necessary.
2266 AddInitialIntervals(block, live);
2267 // Process the instructions in reverse order, generating and killing
2268 // live values.
2269 ProcessInstructions(block, live);
2270 // All phi output operands are killed by this block.
2271 ProcessPhis(block, live);
2272     // Now live is the live_in set for this block, except that it does not
2273     // include values live out on backward successor edges.
2274 if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2275 live_in_sets()[block_id] = live;
2276 }
2277 // Postprocess the ranges.
2278 for (TopLevelLiveRange* range : data()->live_ranges()) {
2279 if (range == nullptr) continue;
2280     // Give slots to all ranges with a non-fixed slot use.
2281 if (range->has_slot_use() && range->HasNoSpillType()) {
2282 data()->AssignSpillRangeToLiveRange(range);
2283 }
2284 // TODO(bmeurer): This is a horrible hack to make sure that for constant
2285 // live ranges, every use requires the constant to be in a register.
2286 // Without this hack, all uses with "any" policy would get the constant
2287 // operand assigned.
2288 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2289 for (UsePosition* pos = range->first_pos(); pos != nullptr;
2290 pos = pos->next()) {
2291 if (pos->type() == UsePositionType::kRequiresSlot) continue;
2292 UsePositionType new_type = UsePositionType::kAny;
2293 // Can't mark phis as needing a register.
2294 if (!pos->pos().IsGapPosition()) {
2295 new_type = UsePositionType::kRequiresRegister;
2296 }
2297 pos->set_type(new_type, true);
2298 }
2299 }
2300 }
2301 for (auto preassigned : data()->preassigned_slot_ranges()) {
2302 TopLevelLiveRange* range = preassigned.first;
2303 int slot_id = preassigned.second;
2304 SpillRange* spill = range->HasSpillRange()
2305 ? range->GetSpillRange()
2306 : data()->AssignSpillRangeToLiveRange(range);
2307 spill->set_assigned_slot(slot_id);
2308 }
2309#ifdef DEBUG
2310 Verify();
2311#endif
2312}
2313
2314
2315void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2316 UsePosition* use_pos) {
2317 DCHECK(!use_pos->IsResolved());
2318 auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2319 DCHECK(res.second);
2320 USE(res);
2321}
2322
2323
2324void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2325 UsePosition* use_pos) {
2326 auto it = phi_hints_.find(operand);
2327 if (it == phi_hints_.end()) return;
2328 DCHECK(!it->second->IsResolved());
2329 it->second->ResolveHint(use_pos);
2330}
2331
2332
2333void LiveRangeBuilder::Verify() const {
2334 for (auto& hint : phi_hints_) {
2335 CHECK(hint.second->IsResolved());
2336 }
2337   for (const TopLevelLiveRange* current : data()->live_ranges()) {
2338 if (current != nullptr && !current->IsEmpty()) {
2339 // New LiveRanges should not be split.
2340 CHECK_NULL(current->next());
2341 // General integrity check.
2342 current->Verify();
2343 const UseInterval* first = current->first_interval();
2344 if (first->next() == nullptr) continue;
2345
2346 // Consecutive intervals should not end and start in the same block,
2347 // otherwise the intervals should have been joined, because the
2348 // variable is live throughout that block.
2349 CHECK(NextIntervalStartsInDifferentBlocks(first));
2350
2351 for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
2352         // Except for the first interval, the other intervals must start at
2353 // a block boundary, otherwise data wouldn't flow to them.
2354 CHECK(IntervalStartsAtBlockBoundary(i));
2355         // The last instruction of each predecessor of the block in which the
2356         // interval starts must be covered by the range.
2357 CHECK(IntervalPredecessorsCoveredByRange(i, current));
2358 if (i->next() != nullptr) {
2359 // Check the consecutive intervals property, except for the last
2360 // interval, where it doesn't apply.
2361 CHECK(NextIntervalStartsInDifferentBlocks(i));
2362 }
2363 }
2364 }
2365   }
2366}
2367
2368bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
2369 const UseInterval* interval) const {
2370 LifetimePosition start = interval->start();
2371 if (!start.IsFullStart()) return false;
2372 int instruction_index = start.ToInstructionIndex();
2373 const InstructionBlock* block =
2374 data()->code()->GetInstructionBlock(instruction_index);
2375 return block->first_instruction_index() == instruction_index;
2376}
2377
2378bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
2379 const UseInterval* interval, const TopLevelLiveRange* range) const {
2380 LifetimePosition start = interval->start();
2381 int instruction_index = start.ToInstructionIndex();
2382 const InstructionBlock* block =
2383 data()->code()->GetInstructionBlock(instruction_index);
2384 for (RpoNumber pred_index : block->predecessors()) {
2385 const InstructionBlock* predecessor =
2386 data()->code()->InstructionBlockAt(pred_index);
2387 LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
2388 predecessor->last_instruction_index());
2389 last_pos = last_pos.NextStart().End();
2390 if (!range->Covers(last_pos)) return false;
2391 }
2392 return true;
2393}
2394
2395bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
2396 const UseInterval* interval) const {
2397 DCHECK_NOT_NULL(interval->next());
2398 LifetimePosition end = interval->end();
2399 LifetimePosition next_start = interval->next()->start();
2400 // Since end is not covered, but the previous position is, move back a
2401   // position.
2402 end = end.IsStart() ? end.PrevStart().End() : end.Start();
2403 int last_covered_index = end.ToInstructionIndex();
2404 const InstructionBlock* block =
2405 data()->code()->GetInstructionBlock(last_covered_index);
2406 const InstructionBlock* next_block =
2407 data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
2408 return block->rpo_number() < next_block->rpo_number();
2409}
2410
2411RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2412 RegisterKind kind)
2413 : data_(data),
2414 mode_(kind),
2415 num_registers_(GetRegisterCount(data->config(), kind)),
2416 num_allocatable_registers_(
2417 GetAllocatableRegisterCount(data->config(), kind)),
2418 allocatable_register_codes_(
2419 GetAllocatableRegisterCodes(data->config(), kind)) {}
2420
2421
2422LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2423 const LiveRange* range, int instruction_index) {
2424 LifetimePosition ret = LifetimePosition::Invalid();
2425
2426 ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2427 if (range->Start() >= ret || ret >= range->End()) {
2428 return LifetimePosition::Invalid();
2429 }
2430 return ret;
2431}
2432
2433
2434void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand(
2435 bool operands_only) {
2436 size_t initial_range_count = data()->live_ranges().size();
2437 for (size_t i = 0; i < initial_range_count; ++i) {
2438 TopLevelLiveRange* range = data()->live_ranges()[i];
2439 if (!CanProcessRange(range)) continue;
2440 if (range->HasNoSpillType() || (operands_only && range->HasSpillRange())) {
2441 continue;
2442 }
2443
2444 LifetimePosition start = range->Start();
2445 TRACE("Live range %d:%d is defined by a spill operand.\n",
2446 range->TopLevel()->vreg(), range->relative_id());
2447 LifetimePosition next_pos = start;
2448 if (next_pos.IsGapPosition()) {
2449 next_pos = next_pos.NextStart();
2450 }
2451 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2452 // If the range already has a spill operand and it doesn't need a
2453 // register immediately, split it and spill the first part of the range.
2454 if (pos == nullptr) {
2455 Spill(range);
2456 } else if (pos->pos() > range->Start().NextStart()) {
2457       // Do not spill the live range eagerly if the use position that can
2458       // benefit from the register is too close to the start of the range.
2459 LifetimePosition split_pos = GetSplitPositionForInstruction(
2460 range, pos->pos().ToInstructionIndex());
2461 // There is no place to split, so we can't split and spill.
2462 if (!split_pos.IsValid()) continue;
2463
2464 split_pos =
2465 FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2466
2467 SplitRangeAt(range, split_pos);
2468 Spill(range);
2469 }
2470 }
2471}
2472
2473
2474LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2475 LifetimePosition pos) {
2476 DCHECK(!range->TopLevel()->IsFixed());
2477 TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2478 range->relative_id(), pos.value());
2479
2480 if (pos <= range->Start()) return range;
2481
2482   // We can't properly connect live ranges if splitting occurred at the end
2483   // of a block.
2484 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2485 (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2486 pos.ToInstructionIndex()));
2487
2488 LiveRange* result = range->SplitAt(pos, allocation_zone());
2489 return result;
2490}
2491
2492
2493LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2494 LifetimePosition start,
2495 LifetimePosition end) {
2496 DCHECK(!range->TopLevel()->IsFixed());
2497 TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2498 range->TopLevel()->vreg(), range->relative_id(), start.value(),
2499 end.value());
2500
2501 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2502 DCHECK(split_pos >= start);
2503 return SplitRangeAt(range, split_pos);
2504}
2505
2506
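// Picks a split position between |start| and |end|. If a loop header lies in
// that interval, the split is hoisted to the first instruction of the
// outermost such loop, so the move inserted at the split point stays out of
// the loop body.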
2507LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2508 LifetimePosition end) {
2509 int start_instr = start.ToInstructionIndex();
2510 int end_instr = end.ToInstructionIndex();
2511 DCHECK(start_instr <= end_instr);
2512
2513   // We have no choice: start and end are within the same instruction.
2514 if (start_instr == end_instr) return end;
2515
2516 const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2517 const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2518
2519 if (end_block == start_block) {
2520 // The interval is split in the same basic block. Split at the latest
2521 // possible position.
2522 return end;
2523 }
2524
2525 const InstructionBlock* block = end_block;
2526 // Find header of outermost loop.
2527   do {
2528 const InstructionBlock* loop = GetContainingLoop(code(), block);
2529 if (loop == nullptr ||
2530 loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2531 // No more loops or loop starts before the lifetime start.
2532 break;
2533 }
2534 block = loop;
2535 } while (true);
2536
2537 // We did not find any suitable outer loop. Split at the latest possible
2538 // position unless end_block is a loop header itself.
2539 if (block == end_block && !end_block->IsLoopHeader()) return end;
2540
2541 return LifetimePosition::GapFromInstructionIndex(
2542 block->first_instruction_index());
2543}
2544
2545
2546LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2547 LiveRange* range, LifetimePosition pos) {
2548 const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2549 const InstructionBlock* loop_header =
2550 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2551
2552 if (loop_header == nullptr) return pos;
2553
2554 const UsePosition* prev_use =
2555 range->PreviousUsePositionRegisterIsBeneficial(pos);
2556
2557 while (loop_header != nullptr) {
2558     // We are going to spill the live range inside the loop.
2559     // If possible, try to move the spilling position backwards to the loop
2560     // header. This will reduce the number of memory moves on the back edge.
2561 LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2562 loop_header->first_instruction_index());
2563
2564 if (range->Covers(loop_start)) {
2565 if (prev_use == nullptr || prev_use->pos() < loop_start) {
2566 // No register beneficial use inside the loop before the pos.
2567 pos = loop_start;
2568 }
2569 }
2570
2571 // Try hoisting out to an outer loop.
2572 loop_header = GetContainingLoop(code(), loop_header);
2573 }
2574
2575 return pos;
2576}
2577
2578
2579void RegisterAllocator::Spill(LiveRange* range) {
2580 DCHECK(!range->spilled());
2581 TopLevelLiveRange* first = range->TopLevel();
2582 TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2583
2584 if (first->HasNoSpillType()) {
2585 data()->AssignSpillRangeToLiveRange(first);
2586 }
2587 range->Spill();
2588}
2589
2590
2591const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters()
2592 const {
2593 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges()
2594 : data()->fixed_live_ranges();
2595}
2596
2597
2598const char* RegisterAllocator::RegisterName(int register_code) const {
2599 if (mode() == GENERAL_REGISTERS) {
2600 return data()->config()->GetGeneralRegisterName(register_code);
2601 } else {
2602 return data()->config()->GetDoubleRegisterName(register_code);
2603 }
2604}
2605
2606
2607LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2608 RegisterKind kind, Zone* local_zone)
2609 : RegisterAllocator(data, kind),
2610 unhandled_live_ranges_(local_zone),
2611 active_live_ranges_(local_zone),
2612 inactive_live_ranges_(local_zone) {
2613 unhandled_live_ranges().reserve(
2614 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
2615 active_live_ranges().reserve(8);
2616 inactive_live_ranges().reserve(8);
2617 // TryAllocateFreeReg and AllocateBlockedReg assume this
2618 // when allocating local arrays.
2619 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
2620 this->data()->config()->num_general_registers());
2621}
2622
2623
2624void LinearScanAllocator::AllocateRegisters() {
2625 DCHECK(unhandled_live_ranges().empty());
2626 DCHECK(active_live_ranges().empty());
2627 DCHECK(inactive_live_ranges().empty());
2628
2629 SplitAndSpillRangesDefinedByMemoryOperand(code()->VirtualRegisterCount() <=
2630 num_allocatable_registers());
2631
2632 for (TopLevelLiveRange* range : data()->live_ranges()) {
2633 if (!CanProcessRange(range)) continue;
2634 for (LiveRange* to_add = range; to_add != nullptr;
2635 to_add = to_add->next()) {
2636 if (!to_add->spilled()) {
2637 AddToUnhandledUnsorted(to_add);
2638 }
2639 }
2640 }
2641 SortUnhandled();
2642 DCHECK(UnhandledIsSorted());
2643
2644 auto& fixed_ranges = GetFixedRegisters();
2645 for (TopLevelLiveRange* current : fixed_ranges) {
2646 if (current != nullptr) {
2647 DCHECK_EQ(mode(), current->kind());
2648 AddToInactive(current);
2649 }
2650 }
2651
2652 while (!unhandled_live_ranges().empty()) {
2653 DCHECK(UnhandledIsSorted());
2654 LiveRange* current = unhandled_live_ranges().back();
2655 unhandled_live_ranges().pop_back();
2656 DCHECK(UnhandledIsSorted());
2657 LifetimePosition position = current->Start();
2658#ifdef DEBUG
2659 allocation_finger_ = position;
2660#endif
2661 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2662 current->relative_id(), position.value());
2663
2664 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2665 continue;
2666
2667 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2668 LiveRange* cur_active = active_live_ranges()[i];
2669 if (cur_active->End() <= position) {
2670 ActiveToHandled(cur_active);
2671 --i; // The live range was removed from the list of active live ranges.
2672 } else if (!cur_active->Covers(position)) {
2673 ActiveToInactive(cur_active);
2674 --i; // The live range was removed from the list of active live ranges.
2675 }
2676 }
2677
2678 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2679 LiveRange* cur_inactive = inactive_live_ranges()[i];
2680 if (cur_inactive->End() <= position) {
2681 InactiveToHandled(cur_inactive);
2682 --i; // Live range was removed from the list of inactive live ranges.
2683 } else if (cur_inactive->Covers(position)) {
2684 InactiveToActive(cur_inactive);
2685 --i; // Live range was removed from the list of inactive live ranges.
2686 }
2687 }
2688
2689 DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2690
2691 bool result = TryAllocateFreeReg(current);
2692 if (!result) AllocateBlockedReg(current);
2693 if (current->HasRegisterAssigned()) {
2694 AddToActive(current);
2695 }
2696 }
2697}
2698
2699
2700void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2701 int reg) {
2702 data()->MarkAllocated(range->kind(), reg);
2703 range->set_assigned_register(reg);
2704 range->SetUseHints(reg);
2705 if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
2706 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
2707 }
2708}
2709
2710
2711void LinearScanAllocator::AddToActive(LiveRange* range) {
2712 TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
2713 range->relative_id());
2714 active_live_ranges().push_back(range);
2715}
2716
2717
2718void LinearScanAllocator::AddToInactive(LiveRange* range) {
2719 TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
2720 range->relative_id());
2721 inactive_live_ranges().push_back(range);
2722}
2723
2724
2725void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2726 if (range == nullptr || range->IsEmpty()) return;
2727 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2728 DCHECK(allocation_finger_ <= range->Start());
2729 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2730 --i) {
2731 LiveRange* cur_range = unhandled_live_ranges().at(i);
2732 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2733 TRACE("Add live range %d:%d to unhandled at %d\n",
2734 range->TopLevel()->vreg(), range->relative_id(), i + 1);
2735 auto it = unhandled_live_ranges().begin() + (i + 1);
2736 unhandled_live_ranges().insert(it, range);
2737 DCHECK(UnhandledIsSorted());
2738 return;
2739 }
2740 TRACE("Add live range %d:%d to unhandled at start\n",
2741 range->TopLevel()->vreg(), range->relative_id());
2742 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2743 DCHECK(UnhandledIsSorted());
2744}
2745
2746
2747void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2748 if (range == nullptr || range->IsEmpty()) return;
2749 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2750 TRACE("Add live range %d:%d to unhandled unsorted at end\n",
2751 range->TopLevel()->vreg(), range->relative_id());
2752 unhandled_live_ranges().push_back(range);
2753}
2754
2755
2756static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2757 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2758 if (a->ShouldBeAllocatedBefore(b)) return false;
2759 if (b->ShouldBeAllocatedBefore(a)) return true;
2760 return a->TopLevel()->vreg() < b->TopLevel()->vreg();
2761}
2762
2763
2764// Sort the unhandled live ranges so that the ranges to be processed first are
2765// at the end of the vector. This is convenient for the register allocation
2766// algorithm because it is efficient to remove elements from the end.
2767void LinearScanAllocator::SortUnhandled() {
2768 TRACE("Sort unhandled\n");
2769 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2770 &UnhandledSortHelper);
2771}
2772
2773
2774bool LinearScanAllocator::UnhandledIsSorted() {
2775 size_t len = unhandled_live_ranges().size();
2776 for (size_t i = 1; i < len; i++) {
2777 LiveRange* a = unhandled_live_ranges().at(i - 1);
2778 LiveRange* b = unhandled_live_ranges().at(i);
2779 if (a->Start() < b->Start()) return false;
2780 }
2781 return true;
2782}
2783
2784
2785void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2786 RemoveElement(&active_live_ranges(), range);
2787 TRACE("Moving live range %d:%d from active to handled\n",
2788 range->TopLevel()->vreg(), range->relative_id());
2789}
2790
2791
2792void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2793 RemoveElement(&active_live_ranges(), range);
2794 inactive_live_ranges().push_back(range);
2795 TRACE("Moving live range %d:%d from active to inactive\n",
2796 range->TopLevel()->vreg(), range->relative_id());
2797}
2798
2799
2800void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2801 RemoveElement(&inactive_live_ranges(), range);
2802 TRACE("Moving live range %d:%d from inactive to handled\n",
2803 range->TopLevel()->vreg(), range->relative_id());
2804}
2805
2806
2807void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2808 RemoveElement(&inactive_live_ranges(), range);
2809 active_live_ranges().push_back(range);
2810 TRACE("Moving live range %d:%d from inactive to active\n",
2811 range->TopLevel()->vreg(), range->relative_id());
2812}
2813
2814
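// Linear-scan allocation step: compute, for each register, how long it stays
// free given the active and inactive ranges; prefer the hinted register if it
// is free past the end of |current|, otherwise take the register that stays
// free the longest, splitting |current| when that register is free only for a
// prefix. Returns false if every register is blocked at |current|'s start.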
2815bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2816 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2817
2818 for (int i = 0; i < num_registers(); i++) {
2819 free_until_pos[i] = LifetimePosition::MaxPosition();
2820 }
2821
2822 for (LiveRange* cur_active : active_live_ranges()) {
2823 free_until_pos[cur_active->assigned_register()] =
2824 LifetimePosition::GapFromInstructionIndex(0);
2825 TRACE("Register %s is free until pos %d (1)\n",
2826 RegisterName(cur_active->assigned_register()),
2827 LifetimePosition::GapFromInstructionIndex(0).value());
2828 }
2829
2830 for (LiveRange* cur_inactive : inactive_live_ranges()) {
2831 DCHECK(cur_inactive->End() > current->Start());
2832 LifetimePosition next_intersection =
2833 cur_inactive->FirstIntersection(current);
2834 if (!next_intersection.IsValid()) continue;
2835 int cur_reg = cur_inactive->assigned_register();
2836 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
2837 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
2838 Min(free_until_pos[cur_reg], next_intersection).value());
2839 }
2840
2841 int hint_register;
2842 if (current->FirstHintPosition(&hint_register) != nullptr) {
2843 TRACE(
2844 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
2845 RegisterName(hint_register), free_until_pos[hint_register].value(),
2846 current->TopLevel()->vreg(), current->relative_id(),
2847 current->End().value());
2848
2849 // The desired register is free until the end of the current live range.
2850 if (free_until_pos[hint_register] >= current->End()) {
2851 TRACE("Assigning preferred reg %s to live range %d:%d\n",
2852 RegisterName(hint_register), current->TopLevel()->vreg(),
2853 current->relative_id());
2854 SetLiveRangeAssignedRegister(current, hint_register);
2855 return true;
2856 }
2857 }
2858
2859 // Find the register which stays free for the longest time.
2860 int reg = allocatable_register_code(0);
2861 for (int i = 1; i < num_allocatable_registers(); ++i) {
2862 int code = allocatable_register_code(i);
2863 if (free_until_pos[code] > free_until_pos[reg]) {
2864 reg = code;
2865 }
2866 }
2867
2868 LifetimePosition pos = free_until_pos[reg];
2869
2870 if (pos <= current->Start()) {
2871 // All registers are blocked.
2872 return false;
2873 }
2874
2875 if (pos < current->End()) {
2876 // Register reg is available at the range start but becomes blocked before
2877     // the range end. Split current at the position where it becomes blocked.
2878 LiveRange* tail = SplitRangeAt(current, pos);
2879 AddToUnhandledSorted(tail);
2880 }
2881
2882 // Register reg is available at the range start and is free until
2883 // the range end.
2884 DCHECK(pos >= current->End());
2885 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
2886 current->TopLevel()->vreg(), current->relative_id());
2887 SetLiveRangeAssignedRegister(current, reg);
2888
2889 return true;
2890}
2891
2892
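// Called when no register is free for the whole of |current|: pick the
// register whose next use by other ranges is farthest away. If even that use
// comes before |current|'s first use that needs a register, spill the head of
// |current| instead; otherwise assign the register, splitting |current| before
// the blocking position if necessary, and split/spill the intersecting active
// and inactive ranges.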
2893void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2894 UsePosition* register_use = current->NextRegisterPosition(current->Start());
2895 if (register_use == nullptr) {
2896 // There is no use in the current live range that requires a register.
2897 // We can just spill it.
2898 Spill(current);
2899 return;
2900 }
2901
2902 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2903 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
2904
2905 for (int i = 0; i < num_registers(); i++) {
2906 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
2907 }
2908
2909 for (LiveRange* range : active_live_ranges()) {
2910 int cur_reg = range->assigned_register();
2911 if (range->TopLevel()->IsFixed() ||
2912 !range->CanBeSpilled(current->Start())) {
2913 block_pos[cur_reg] = use_pos[cur_reg] =
2914 LifetimePosition::GapFromInstructionIndex(0);
2915 } else {
2916 UsePosition* next_use =
2917 range->NextUsePositionRegisterIsBeneficial(current->Start());
2918 if (next_use == nullptr) {
2919 use_pos[cur_reg] = range->End();
2920 } else {
2921 use_pos[cur_reg] = next_use->pos();
2922 }
2923 }
2924 }
2925
2926 for (LiveRange* range : inactive_live_ranges()) {
2927 DCHECK(range->End() > current->Start());
2928 LifetimePosition next_intersection = range->FirstIntersection(current);
2929 if (!next_intersection.IsValid()) continue;
2930 int cur_reg = range->assigned_register();
2931 if (range->TopLevel()->IsFixed()) {
2932 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
2933 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
2934 } else {
2935 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
2936 }
2937 }
2938
2939 int reg = allocatable_register_code(0);
2940 for (int i = 1; i < num_allocatable_registers(); ++i) {
2941 int code = allocatable_register_code(i);
2942 if (use_pos[code] > use_pos[reg]) {
2943 reg = code;
2944 }
2945 }
2946
2947 LifetimePosition pos = use_pos[reg];
2948
2949 if (pos < register_use->pos()) {
2950 // All registers are blocked before the first use that requires a register.
2951 // Spill the starting part of the live range up to that use.
2952 SpillBetween(current, current->Start(), register_use->pos());
2953 return;
2954 }
2955
2956 if (block_pos[reg] < current->End()) {
2957 // Register becomes blocked before the current range end. Split before that
2958 // position.
2959 LiveRange* tail =
2960 SplitBetween(current, current->Start(), block_pos[reg].Start());
2961 AddToUnhandledSorted(tail);
2962 }
2963
2964 // Register reg is not blocked for the whole range.
2965 DCHECK(block_pos[reg] >= current->End());
2966 TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
2967 current->TopLevel()->vreg(), current->relative_id());
2968 SetLiveRangeAssignedRegister(current, reg);
2969
2970 // This register was not free. Thus we need to find and spill
2971 // parts of active and inactive live ranges that use the same register
2972 // at the same lifetime positions as current.
2973 SplitAndSpillIntersecting(current);
2974}
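// Illustrative walk-through of the heuristic above (all numbers made up):
//   register_use->pos() = 20, current = [10, 60)
//   use_pos   = { r1: 12, r2: 50 }   // next beneficial use of competitors
//   block_pos = { r1: 12, r2: 90 }   // where the register is hard-blocked
//   r2 has the furthest use_pos, so reg = r2 and pos = 50.
//   pos >= register_use->pos() (50 >= 20), so current is not spilled up
//   front; block_pos[r2] = 90 >= current->End() = 60, so no split is needed
//   either. r2 is assigned to current, and the competing ranges holding r2
//   are then evicted by SplitAndSpillIntersecting.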
2975
2976
2977void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2978 DCHECK(current->HasRegisterAssigned());
2979 int reg = current->assigned_register();
2980 LifetimePosition split_pos = current->Start();
2981 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2982 LiveRange* range = active_live_ranges()[i];
2983 if (range->assigned_register() == reg) {
2984 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2985 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
2986 if (next_pos == nullptr) {
2987 SpillAfter(range, spill_pos);
2988 } else {
2989 // When spilling between spill_pos and next_pos, ensure that the range
2990 // remains spilled at least until the start of the current live range.
2991 // This guarantees that we will not introduce new unhandled ranges that
2992 // start before the current range, as that would violate the allocation
2993 // invariant and lead to an inconsistent state of the active and inactive
2994 // live ranges: ranges are allocated in order of their start positions,
2995 // and ranges are retired from active/inactive when the start of the
2996 // current live range is larger than their end.
2997 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2998 }
2999 ActiveToHandled(range);
3000 --i;
3001 }
3002 }
3003
3004 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
3005 LiveRange* range = inactive_live_ranges()[i];
3006 DCHECK(range->End() > current->Start());
3007 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
3008 LifetimePosition next_intersection = range->FirstIntersection(current);
3009 if (next_intersection.IsValid()) {
3010 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3011 if (next_pos == nullptr) {
3012 SpillAfter(range, split_pos);
3013 } else {
3014 next_intersection = Min(next_intersection, next_pos->pos());
3015 SpillBetween(range, split_pos, next_intersection);
3016 }
3017 InactiveToHandled(range);
3018 --i;
3019 }
3020 }
3021 }
3022}
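// Illustrative scenario for the eviction above (positions are made up):
//   current = [30, 80) was just assigned the contested register.
//   An active range holding that register with its next register use at 50
//   is spilled from its optimal spilling position (at or before 30) until
//   at least 30, with the tail around [50, ...) re-queued as unhandled.
//   A fixed inactive range keeping the register is left untouched, while a
//   non-fixed inactive range that intersects current is spilled between 30
//   and the earlier of that intersection and its next register use.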
3023
3024
3025bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
3026 if (!range->is_phi()) return false;
3027
3028 DCHECK(!range->HasSpillOperand());
3029 RegisterAllocationData::PhiMapValue* phi_map_value =
3030 data()->GetPhiMapValueFor(range);
3031 const PhiInstruction* phi = phi_map_value->phi();
3032 const InstructionBlock* block = phi_map_value->block();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003033 // Count the number of spilled operands.
3034 size_t spilled_count = 0;
3035 LiveRange* first_op = nullptr;
3036 for (size_t i = 0; i < phi->operands().size(); i++) {
3037 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003038 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
3039 if (!op_range->TopLevel()->HasSpillRange()) continue;
3040 const InstructionBlock* pred =
3041 code()->InstructionBlockAt(block->predecessors()[i]);
3042 LifetimePosition pred_end =
3043 LifetimePosition::InstructionFromInstructionIndex(
3044 pred->last_instruction_index());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003045 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
3046 op_range = op_range->next();
3047 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003048 if (op_range != nullptr && op_range->spilled()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003049 spilled_count++;
3050 if (first_op == nullptr) {
3051 first_op = op_range->TopLevel();
3052 }
3053 }
3054 }
3055
3056 // Only continue if more than half of the operands are spilled.
3057 if (spilled_count * 2 <= phi->operands().size()) {
3058 return false;
3059 }
3060
3061 // Try to merge the spilled operands and count the number of merged spilled
3062 // operands.
3063 DCHECK(first_op != nullptr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003064 SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003065 size_t num_merged = 1;
3066 for (size_t i = 1; i < phi->operands().size(); i++) {
3067 int op = phi->operands()[i];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003068 TopLevelLiveRange* op_range = data()->live_ranges()[op];
3069 if (!op_range->HasSpillRange()) continue;
3070 SpillRange* op_spill = op_range->GetSpillRange();
3071 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003072 num_merged++;
3073 }
3074 }
3075
3076 // Only continue if enough operands could be merged into the
3077 // same spill slot.
3078 if (num_merged * 2 <= phi->operands().size() ||
3079 AreUseIntervalsIntersecting(first_op_spill->interval(),
3080 range->first_interval())) {
3081 return false;
3082 }
3083
3084 // If the range does not need a register soon, spill it to the merged
3085 // spill range.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003086 LifetimePosition next_pos = range->Start();
3087 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
3088 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003089 if (pos == nullptr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003090 SpillRange* spill_range =
3091 range->TopLevel()->HasSpillRange()
3092 ? range->TopLevel()->GetSpillRange()
3093 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3094 bool merged = first_op_spill->TryMerge(spill_range);
3095 CHECK(merged);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003096 Spill(range);
3097 return true;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003098 } else if (pos->pos() > range->Start().NextStart()) {
3099 SpillRange* spill_range =
3100 range->TopLevel()->HasSpillRange()
3101 ? range->TopLevel()->GetSpillRange()
3102 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3103 bool merged = first_op_spill->TryMerge(spill_range);
3104 CHECK(merged);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003105 SpillBetween(range, range->Start(), pos->pos());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003106 DCHECK(UnhandledIsSorted());
3107 return true;
3108 }
3109 return false;
3110}
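// Numeric sketch of the two majority checks above (hypothetical phi with
// four operands):
//   spilled_count = 3 -> 3 * 2 = 6 > 4, so the merge attempt proceeds.
//   num_merged    = 2 -> 2 * 2 = 4 <= 4, so we give up: too few operands
//   ended up sharing first_op_spill for slot reuse to pay off.
// Only when a strict majority of the operands are spilled *and* can be
// merged into one spill range can the phi's own range be spilled there,
// and even then only if it does not need a register soon, per the final
// checks in the function.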
3111
3112
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003113void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
3114 LiveRange* second_part = SplitRangeAt(range, pos);
3115 Spill(second_part);
3116}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003117
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003118
3119void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
3120 LifetimePosition end) {
3121 SpillBetweenUntil(range, start, start, end);
3122}
3123
3124
3125void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
3126 LifetimePosition start,
3127 LifetimePosition until,
3128 LifetimePosition end) {
3129 CHECK(start < end);
3130 LiveRange* second_part = SplitRangeAt(range, start);
3131
3132 if (second_part->Start() < end) {
3133 // The split result intersects with [start, end[.
3134 // Split it at a position in ]start+1, end[, spill the middle part,
3135 // and put the rest back on the unhandled list.
3136 LifetimePosition third_part_end = end.PrevStart().End();
3137 if (data()->IsBlockBoundary(end.Start())) {
3138 third_part_end = end.Start();
3139 }
3140 LiveRange* third_part = SplitBetween(
3141 second_part, Max(second_part->Start().End(), until), third_part_end);
3142
3143 DCHECK(third_part != second_part);
3144
3145 Spill(second_part);
3146 AddToUnhandledSorted(third_part);
3147 } else {
3148 // The split result does not intersect with [start, end[.
3149 // Nothing to spill. Just put it back on the unhandled list as a whole.
3150 AddToUnhandledSorted(second_part);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003151 }
3152}
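// Sketch of the three-way split performed above (made-up positions):
//   range = [4, 100), start = until = 10, end = 40
//   second_part = SplitRangeAt(range, 10)  -> covers [10, 100)
//   third_part  = SplitBetween(second_part, >10, ~40) -> starts somewhere
//                 in ]10, 40], chosen at an optimal split position
//   second_part (now ending where third_part starts) is spilled, and
//   third_part goes back to the unhandled list so it may still receive a
//   register once its start position is reached.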
3153
3154
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003155SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
3156 : data_(data) {}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003157
3158
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003159void SpillSlotLocator::LocateSpillSlots() {
3160 const InstructionSequence* code = data()->code();
3161 for (TopLevelLiveRange* range : data()->live_ranges()) {
3162 if (range == nullptr || range->IsEmpty()) continue;
3163 // We only care about ranges that are spilled in the frame.
Ben Murdochda12d292016-06-02 14:46:10 +01003164 if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
3165 continue;
3166 }
3167 TopLevelLiveRange::SpillMoveInsertionList* spills =
3168 range->GetSpillMoveInsertionLocations();
3169 DCHECK_NOT_NULL(spills);
3170 for (; spills != nullptr; spills = spills->next) {
3171 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003172 }
3173 }
3174}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003175
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003176
3177OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
3178
3179
3180void OperandAssigner::AssignSpillSlots() {
3181 ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
3182 // Merge disjoint spill ranges.
3183 for (size_t i = 0; i < spill_ranges.size(); ++i) {
3184 SpillRange* range = spill_ranges[i];
3185 if (range == nullptr) continue;
3186 if (range->IsEmpty()) continue;
3187 for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
3188 SpillRange* other = spill_ranges[j];
3189 if (other != nullptr && !other->IsEmpty()) {
3190 range->TryMerge(other);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003191 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003192 }
3193 }
3194 // Allocate slots for the merged spill ranges.
3195 for (SpillRange* range : spill_ranges) {
3196 if (range == nullptr || range->IsEmpty()) continue;
3197 // Allocate a new operand referring to the spill slot.
3198 if (!range->HasSlot()) {
3199 int byte_width = range->ByteWidth();
3200 int index = data()->frame()->AllocateSpillSlot(byte_width);
3201 range->set_assigned_slot(index);
3202 }
3203 }
3204}
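// For instance (hypothetical): two spill ranges whose use intervals are
//   A = [10, 20) and B = [30, 45)
// do not intersect, so TryMerge folds B into A and both virtual registers
// end up sharing a single stack slot, which is then allocated once via
// frame()->AllocateSpillSlot() in the second loop above.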
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003205
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003206
3207void OperandAssigner::CommitAssignment() {
3208 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3209 if (top_range == nullptr || top_range->IsEmpty()) continue;
3210 InstructionOperand spill_operand;
3211 if (top_range->HasSpillOperand()) {
3212 spill_operand = *top_range->TopLevel()->GetSpillOperand();
3213 } else if (top_range->TopLevel()->HasSpillRange()) {
3214 spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
3215 }
3216 if (top_range->is_phi()) {
3217 data()->GetPhiMapValueFor(top_range)->CommitAssignment(
3218 top_range->GetAssignedOperand());
3219 }
3220 for (LiveRange* range = top_range; range != nullptr;
3221 range = range->next()) {
3222 InstructionOperand assigned = range->GetAssignedOperand();
3223 range->ConvertUsesToOperand(assigned, spill_operand);
3224 }
3225
3226 if (!spill_operand.IsInvalid()) {
3227 // If this top level range has a child spilled in a deferred block, we use
3228 // the range and control flow connection mechanism instead of spilling at
3229 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3230 // phases. Normally, when we spill at definition, we do not insert a
3231 // connecting move when a successor child range is spilled, because the
3232 // spilled range picks up its value from the slot which was assigned at
3233 // definition. For ranges that are determined to spill only in deferred
Ben Murdoch097c5b22016-05-18 11:27:45 +01003234 // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
3235 // where a spill operand is expected, and then finalize by inserting the
3236 // spills in the deferred blocks' dominators.
3237 if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003238 // Spill at definition if the range isn't spilled only in deferred
3239 // blocks.
3240 top_range->CommitSpillMoves(
3241 data()->code(), spill_operand,
3242 top_range->has_slot_use() || top_range->spilled());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003243 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003244 }
3245 }
3246}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003247
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003248
3249ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3250 : data_(data) {}
3251
3252
3253bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3254 int safe_point = 0;
3255 for (ReferenceMap* map : *data()->code()->reference_maps()) {
3256 if (safe_point > map->instruction_position()) return false;
3257 safe_point = map->instruction_position();
3258 }
3259 return true;
3260}
3261
3262
3263void ReferenceMapPopulator::PopulateReferenceMaps() {
3264 DCHECK(SafePointsAreInOrder());
3265 // Map all delayed references.
3266 for (RegisterAllocationData::DelayedReference& delayed_reference :
3267 data()->delayed_references()) {
3268 delayed_reference.map->RecordReference(
3269 AllocatedOperand::cast(*delayed_reference.operand));
3270 }
3271 // Iterate over all safe point positions and record a pointer
3272 // for all spilled live ranges at this point.
3273 int last_range_start = 0;
3274 const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3275 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3276 for (TopLevelLiveRange* range : data()->live_ranges()) {
3277 if (range == nullptr) continue;
3278 // Skip non-reference values.
3279 if (!data()->IsReference(range)) continue;
3280 // Skip empty live ranges.
3281 if (range->IsEmpty()) continue;
3282 if (range->has_preassigned_slot()) continue;
3283
3284 // Find the extent of the range and its children.
3285 int start = range->Start().ToInstructionIndex();
3286 int end = 0;
3287 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3288 LifetimePosition this_end = cur->End();
3289 if (this_end.ToInstructionIndex() > end)
3290 end = this_end.ToInstructionIndex();
3291 DCHECK(cur->Start().ToInstructionIndex() >= start);
3292 }
3293
3294 // Most of the ranges are in order, but not all. Keep an eye on when they
3295 // step backwards and reset the first_it so we don't miss any safe points.
3296 if (start < last_range_start) first_it = reference_maps->begin();
3297 last_range_start = start;
3298
3299 // Step across all the safe points that are before the start of this range,
3300 // recording how far we step in order to save doing this for the next range.
3301 for (; first_it != reference_maps->end(); ++first_it) {
3302 ReferenceMap* map = *first_it;
3303 if (map->instruction_position() >= start) break;
3304 }
3305
3306 InstructionOperand spill_operand;
3307 if (((range->HasSpillOperand() &&
3308 !range->GetSpillOperand()->IsConstant()) ||
3309 range->HasSpillRange())) {
3310 if (range->HasSpillOperand()) {
3311 spill_operand = *range->GetSpillOperand();
3312 } else {
3313 spill_operand = range->GetSpillRangeOperand();
3314 }
3315 DCHECK(spill_operand.IsStackSlot());
3316 DCHECK_EQ(MachineRepresentation::kTagged,
3317 AllocatedOperand::cast(spill_operand).representation());
3318 }
3319
3320 LiveRange* cur = range;
3321 // Step through the safe points to see whether they are in the range.
3322 for (auto it = first_it; it != reference_maps->end(); ++it) {
3323 ReferenceMap* map = *it;
3324 int safe_point = map->instruction_position();
3325
3326 // The safe points are sorted so we can stop searching here.
3327 if (safe_point - 1 > end) break;
3328
3329 // Advance to the next active range that covers the current
3330 // safe point position.
3331 LifetimePosition safe_point_pos =
3332 LifetimePosition::InstructionFromInstructionIndex(safe_point);
3333
3334 // Search for the child range (cur) that covers safe_point_pos. If we
3335 // don't find it before the children pass safe_point_pos, keep cur at
3336 // the last child, because the next safe_point_pos may be covered by cur.
3337 // This may happen if cur has more than one interval, and the current
3338 // safe_point_pos is in between intervals.
3339 // For that reason, cur can advance at most to the last child.
3340 DCHECK_NOT_NULL(cur);
3341 DCHECK(safe_point_pos >= cur->Start() || range == cur);
3342 bool found = false;
3343 while (!found) {
3344 if (cur->Covers(safe_point_pos)) {
3345 found = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003346 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003347 LiveRange* next = cur->next();
3348 if (next == nullptr || next->Start() > safe_point_pos) {
3349 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003350 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003351 cur = next;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003352 }
3353 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003354
3355 if (!found) {
3356 continue;
3357 }
3358
3359 // Check if the live range is spilled and the safe point is after
3360 // the spill position.
3361 int spill_index = range->IsSpilledOnlyInDeferredBlocks()
3362 ? cur->Start().ToInstructionIndex()
3363 : range->spill_start_index();
3364
3365 if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
3366 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
3367 range->vreg(), spill_index, safe_point);
3368 map->RecordReference(AllocatedOperand::cast(spill_operand));
3369 }
3370
3371 if (!cur->spilled()) {
3372 TRACE(
3373 "Pointer in register for range %d:%d (start at %d) "
3374 "at safe point %d\n",
3375 range->vreg(), cur->relative_id(), cur->Start().value(),
3376 safe_point);
3377 InstructionOperand operand = cur->GetAssignedOperand();
3378 DCHECK(!operand.IsStackSlot());
3379 DCHECK_EQ(MachineRepresentation::kTagged,
3380 AllocatedOperand::cast(operand).representation());
3381 map->RecordReference(AllocatedOperand::cast(operand));
3382 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003383 }
3384 }
3385}
3386
3387
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003388LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
3389 : data_(data) {}
3390
3391
3392bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3393 const InstructionBlock* block) const {
3394 if (block->PredecessorCount() != 1) return false;
3395 return block->predecessors()[0].IsNext(block->rpo_number());
3396}
3397
3398
3399void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003400 // Lazily linearize live ranges in memory for fast lookup.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003401 LiveRangeFinder finder(data(), local_zone);
3402 ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3403 for (const InstructionBlock* block : code()->instruction_blocks()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003404 if (CanEagerlyResolveControlFlow(block)) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003405 BitVector* live = live_in_sets[block->rpo_number().ToInt()];
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003406 BitVector::Iterator iterator(live);
3407 while (!iterator.Done()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003408 LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
3409 for (const RpoNumber& pred : block->predecessors()) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003410 FindResult result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003411 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3412 if (!array->FindConnectableSubranges(block, pred_block, &result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003413 continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003414 }
3415 InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3416 InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3417 if (pred_op.Equals(cur_op)) continue;
Ben Murdoch097c5b22016-05-18 11:27:45 +01003418 if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
3419 // We're doing a reload.
3420 // We don't need to, if:
3421 // 1) there's no register use in this block, and
3422 // 2) the range ends before the block does, and
3423 // 3) we don't have a successor, or the successor is spilled.
3424 LifetimePosition block_start =
3425 LifetimePosition::GapFromInstructionIndex(block->code_start());
3426 LifetimePosition block_end =
3427 LifetimePosition::GapFromInstructionIndex(block->code_end());
3428 const LiveRange* current = result.cur_cover_;
3429 const LiveRange* successor = current->next();
3430 if (current->End() < block_end &&
3431 (successor == nullptr || successor->spilled())) {
3432 // Verify point 1: no register use. We can go to the end of the
3433 // range, since it's all within the block.
3434
3435 bool uses_reg = false;
3436 for (const UsePosition* use = current->NextUsePosition(block_start);
3437 use != nullptr; use = use->next()) {
3438 if (use->operand()->IsAnyRegister()) {
3439 uses_reg = true;
3440 break;
3441 }
3442 }
3443 if (!uses_reg) continue;
3444 }
3445 if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3446 pred_block->IsDeferred()) {
3447 // The spill location should be defined in pred_block, so add
3448 // pred_block to the list of blocks requiring a spill operand.
3449 current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
3450 pred_block->rpo_number().ToInt());
3451 }
3452 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003453 int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
3454 USE(move_loc);
3455 DCHECK_IMPLIES(
3456 result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3457 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
3458 code()->GetInstructionBlock(move_loc)->IsDeferred());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003459 }
3460 iterator.Advance();
3461 }
3462 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003463
3464 // At this stage, we collected blocks needing a spill operand from
3465 // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
3466 // deferred blocks.
3467 for (TopLevelLiveRange* top : data()->live_ranges()) {
3468 if (top == nullptr || top->IsEmpty() ||
3469 !top->IsSpilledOnlyInDeferredBlocks())
3470 continue;
3471 CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
3472 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003473}
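// Worked example for the reload-elision test above (hypothetical):
//   At a control-flow join, pred_op is a stack slot and cur_op a register,
//   so a reload move would normally be inserted. If the covering child
//   range ends before the block does, its successor child is spilled (or
//   absent), and none of its uses in the block require a register, the
//   reload is skipped entirely. Otherwise ResolveControlFlow(block, cur_op,
//   pred_block, pred_op) inserts the gap move, at the block start when the
//   block has a single predecessor, or at the predecessor's end when the
//   predecessor has a single successor.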
3474
3475
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003476int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
3477 const InstructionOperand& cur_op,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003478 const InstructionBlock* pred,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003479 const InstructionOperand& pred_op) {
3480 DCHECK(!pred_op.Equals(cur_op));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003481 int gap_index;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003482 Instruction::GapPosition position;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003483 if (block->PredecessorCount() == 1) {
3484 gap_index = block->first_instruction_index();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003485 position = Instruction::START;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003486 } else {
3487 DCHECK(pred->SuccessorCount() == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003488 DCHECK(!code()
3489 ->InstructionAt(pred->last_instruction_index())
3490 ->HasReferenceMap());
3491 gap_index = pred->last_instruction_index();
3492 position = Instruction::END;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003493 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003494 data()->AddGapMove(gap_index, position, pred_op, cur_op);
3495 return gap_index;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003496}
3497
3498
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003499void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
3500 DelayedInsertionMap delayed_insertion_map(local_zone);
3501 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3502 if (top_range == nullptr) continue;
3503 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
3504 LiveRange* first_range = top_range;
3505 for (LiveRange *second_range = first_range->next(); second_range != nullptr;
3506 first_range = second_range, second_range = second_range->next()) {
3507 LifetimePosition pos = second_range->Start();
3508 // Add a gap move if the two live ranges touch and there is no block
3509 // boundary.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003510 if (second_range->spilled()) continue;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003511 if (first_range->End() != pos) continue;
3512 if (data()->IsBlockBoundary(pos) &&
3513 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003514 continue;
3515 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003516 InstructionOperand prev_operand = first_range->GetAssignedOperand();
3517 InstructionOperand cur_operand = second_range->GetAssignedOperand();
3518 if (prev_operand.Equals(cur_operand)) continue;
3519 bool delay_insertion = false;
3520 Instruction::GapPosition gap_pos;
3521 int gap_index = pos.ToInstructionIndex();
Ben Murdoch097c5b22016-05-18 11:27:45 +01003522 if (connect_spilled && !prev_operand.IsAnyRegister() &&
3523 cur_operand.IsAnyRegister()) {
3524 const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
3525 DCHECK(block->IsDeferred());
3526 // We are performing a reload in this block, so the spill operand must
3527 // be defined here.
3528 top_range->GetListOfBlocksRequiringSpillOperands()->Add(
3529 block->rpo_number().ToInt());
3530 }
3531
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003532 if (pos.IsGapPosition()) {
3533 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003534 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003535 if (pos.IsStart()) {
3536 delay_insertion = true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003537 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003538 gap_index++;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003539 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003540 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
3541 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003542 // Reloads or spills for ranges that are spilled only in deferred blocks
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003543 // must themselves happen only in deferred blocks.
3544 DCHECK_IMPLIES(
3545 connect_spilled &&
3546 !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
3547 code()->GetInstructionBlock(gap_index)->IsDeferred());
3548
3549 ParallelMove* move =
3550 code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
3551 gap_pos, code_zone());
3552 if (!delay_insertion) {
3553 move->AddMove(prev_operand, cur_operand);
3554 } else {
3555 delayed_insertion_map.insert(
3556 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003557 }
3558 }
3559 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003560 if (delayed_insertion_map.empty()) return;
3561 // Insert all the moves which should occur after the stored move.
3562 ZoneVector<MoveOperands*> to_insert(local_zone);
3563 ZoneVector<MoveOperands*> to_eliminate(local_zone);
3564 to_insert.reserve(4);
3565 to_eliminate.reserve(4);
3566 ParallelMove* moves = delayed_insertion_map.begin()->first.first;
3567 for (auto it = delayed_insertion_map.begin();; ++it) {
3568 bool done = it == delayed_insertion_map.end();
3569 if (done || it->first.first != moves) {
3570 // Commit the MoveOperands for current ParallelMove.
3571 for (MoveOperands* move : to_eliminate) {
3572 move->Eliminate();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003573 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003574 for (MoveOperands* move : to_insert) {
3575 moves->push_back(move);
3576 }
3577 if (done) break;
3578 // Reset state.
3579 to_eliminate.clear();
3580 to_insert.clear();
3581 moves = it->first.first;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003582 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003583 // Gather all MoveOperands for a single ParallelMove.
3584 MoveOperands* move =
3585 new (code_zone()) MoveOperands(it->first.second, it->second);
3586 MoveOperands* eliminate = moves->PrepareInsertAfter(move);
3587 to_insert.push_back(move);
3588 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003589 }
3590}
3591
3592
Ben Murdoch097c5b22016-05-18 11:27:45 +01003593void LiveRangeConnector::CommitSpillsInDeferredBlocks(
3594 TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
3595 DCHECK(range->IsSpilledOnlyInDeferredBlocks());
3596 DCHECK(!range->spilled());
3597
3598 InstructionSequence* code = data()->code();
3599 InstructionOperand spill_operand = range->GetSpillRangeOperand();
3600
3601 TRACE("Live Range %d will be spilled only in deferred blocks.\n",
3602 range->vreg());
3603 // If we have ranges that aren't spilled but require the operand on the stack,
3604 // make sure we insert the spill.
3605 for (const LiveRange* child = range; child != nullptr;
3606 child = child->next()) {
3607 for (const UsePosition* pos = child->first_pos(); pos != nullptr;
3608 pos = pos->next()) {
3609 if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
3610 continue;
3611 range->AddBlockRequiringSpillOperand(
3612 code->GetInstructionBlock(pos->pos().ToInstructionIndex())
3613 ->rpo_number());
3614 }
3615 }
3616
3617 ZoneQueue<int> worklist(temp_zone);
3618
3619 for (BitVector::Iterator iterator(
3620 range->GetListOfBlocksRequiringSpillOperands());
3621 !iterator.Done(); iterator.Advance()) {
3622 worklist.push(iterator.Current());
3623 }
3624
3625 // Seek the deferred blocks that dominate locations requiring spill operands,
3626 // and spill there. We only need to spill at the start of such blocks.
3627 BitVector done_blocks(
3628 range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
3629 while (!worklist.empty()) {
3630 int block_id = worklist.front();
3631 worklist.pop();
3632 if (done_blocks.Contains(block_id)) continue;
3633 done_blocks.Add(block_id);
Ben Murdochda12d292016-06-02 14:46:10 +01003634 InstructionBlock* spill_block =
Ben Murdoch097c5b22016-05-18 11:27:45 +01003635 code->InstructionBlockAt(RpoNumber::FromInt(block_id));
3636
3637 for (const RpoNumber& pred : spill_block->predecessors()) {
3638 const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
3639
3640 if (pred_block->IsDeferred()) {
3641 worklist.push(pred_block->rpo_number().ToInt());
3642 } else {
3643 LifetimePosition pred_end =
3644 LifetimePosition::InstructionFromInstructionIndex(
3645 pred_block->last_instruction_index());
3646
3647 LiveRangeBound* bound = array->Find(pred_end);
3648
3649 InstructionOperand pred_op = bound->range_->GetAssignedOperand();
3650
3651 data()->AddGapMove(spill_block->first_instruction_index(),
3652 Instruction::GapPosition::START, pred_op,
3653 spill_operand);
Ben Murdochda12d292016-06-02 14:46:10 +01003654 spill_block->mark_needs_frame();
Ben Murdoch097c5b22016-05-18 11:27:45 +01003655 }
3656 }
3657 }
3658}
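// Example of the dominator walk above (hypothetical block graph): deferred
// block B7 needs the spill operand and has predecessors B6 (deferred) and
// B2 (not deferred). B6 is pushed onto the worklist and handled the same
// way, while for the edge from B2 a gap move from the operand assigned at
// the end of B2 into the spill slot is emitted at the start of B7, and B7
// is marked as needing a frame.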
3659
3660
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003661} // namespace compiler
3662} // namespace internal
3663} // namespace v8