1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/register-allocator.h"
6
7#include "src/compiler/linkage.h"
8#include "src/hydrogen.h"
9#include "src/string-stream.h"
10
11namespace v8 {
12namespace internal {
13namespace compiler {
14
15static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
16 return a.Value() < b.Value() ? a : b;
17}
18
19
20static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
21 return a.Value() > b.Value() ? a : b;
22}
23
24
25UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
26 InstructionOperand* hint)
27 : operand_(operand),
28 hint_(hint),
29 pos_(pos),
30 next_(NULL),
31 requires_reg_(false),
32 register_beneficial_(true) {
33 if (operand_ != NULL && operand_->IsUnallocated()) {
34 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
35 requires_reg_ = unalloc->HasRegisterPolicy();
36 register_beneficial_ = !unalloc->HasAnyPolicy();
37 }
38 DCHECK(pos_.IsValid());
39}
40
41
42bool UsePosition::HasHint() const {
43 return hint_ != NULL && !hint_->IsUnallocated();
44}
45
46
47bool UsePosition::RequiresRegister() const { return requires_reg_; }
48
49
50bool UsePosition::RegisterIsBeneficial() const { return register_beneficial_; }
51
52
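// Splits this interval into [start_, pos) and [pos, end_); the new tail
// interval is linked in directly after this one.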
53void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
54 DCHECK(Contains(pos) && pos.Value() != start().Value());
55 UseInterval* after = new (zone) UseInterval(pos, end_);
56 after->next_ = next_;
57 next_ = after;
58 end_ = pos;
59}
60
61
62#ifdef DEBUG
63
64
65void LiveRange::Verify() const {
66 UsePosition* cur = first_pos_;
67 while (cur != NULL) {
68 DCHECK(Start().Value() <= cur->pos().Value() &&
69 cur->pos().Value() <= End().Value());
70 cur = cur->next();
71 }
72}
73
74
75bool LiveRange::HasOverlap(UseInterval* target) const {
76 UseInterval* current_interval = first_interval_;
77 while (current_interval != NULL) {
78 // Intervals overlap if the start of one is contained in the other.
79 if (current_interval->Contains(target->start()) ||
80 target->Contains(current_interval->start())) {
81 return true;
82 }
83 current_interval = current_interval->next();
84 }
85 return false;
86}
87
88
89#endif
90
91
92LiveRange::LiveRange(int id, Zone* zone)
93 : id_(id),
94 spilled_(false),
95 is_phi_(false),
96 is_non_loop_phi_(false),
97 kind_(UNALLOCATED_REGISTERS),
98 assigned_register_(kInvalidAssignment),
99 last_interval_(NULL),
100 first_interval_(NULL),
101 first_pos_(NULL),
102 parent_(NULL),
103 next_(NULL),
104 current_interval_(NULL),
105 last_processed_use_(NULL),
106 current_hint_operand_(NULL),
107 spill_operand_(new (zone) InstructionOperand()),
108 spill_start_index_(kMaxInt) {}
109
110
111void LiveRange::set_assigned_register(int reg, Zone* zone) {
112 DCHECK(!HasRegisterAssigned() && !IsSpilled());
113 assigned_register_ = reg;
114 ConvertOperands(zone);
115}
116
117
118void LiveRange::MakeSpilled(Zone* zone) {
119 DCHECK(!IsSpilled());
120 DCHECK(TopLevel()->HasAllocatedSpillOperand());
121 spilled_ = true;
122 assigned_register_ = kInvalidAssignment;
123 ConvertOperands(zone);
124}
125
126
127bool LiveRange::HasAllocatedSpillOperand() const {
128 DCHECK(spill_operand_ != NULL);
129 return !spill_operand_->IsIgnored();
130}
131
132
133void LiveRange::SetSpillOperand(InstructionOperand* operand) {
134 DCHECK(!operand->IsUnallocated());
135 DCHECK(spill_operand_ != NULL);
136 DCHECK(spill_operand_->IsIgnored());
137 spill_operand_->ConvertTo(operand->kind(), operand->index());
138}
139
140
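// Returns the first use position at or after |start|. The search resumes from
// the cached last_processed_use_, which assumes queries are made with
// non-decreasing |start| values.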
141UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
142 UsePosition* use_pos = last_processed_use_;
143 if (use_pos == NULL) use_pos = first_pos();
144 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
145 use_pos = use_pos->next();
146 }
147 last_processed_use_ = use_pos;
148 return use_pos;
149}
150
151
152UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
153 LifetimePosition start) {
154 UsePosition* pos = NextUsePosition(start);
155 while (pos != NULL && !pos->RegisterIsBeneficial()) {
156 pos = pos->next();
157 }
158 return pos;
159}
160
161
162UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
163 LifetimePosition start) {
164 UsePosition* pos = first_pos();
165 UsePosition* prev = NULL;
166 while (pos != NULL && pos->pos().Value() < start.Value()) {
167 if (pos->RegisterIsBeneficial()) prev = pos;
168 pos = pos->next();
169 }
170 return prev;
171}
172
173
174UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
175 UsePosition* pos = NextUsePosition(start);
176 while (pos != NULL && !pos->RequiresRegister()) {
177 pos = pos->next();
178 }
179 return pos;
180}
181
182
183bool LiveRange::CanBeSpilled(LifetimePosition pos) {
184 // We cannot spill a live range that has a use requiring a register
185 // at the current or the immediate next position.
186 UsePosition* use_pos = NextRegisterPosition(pos);
187 if (use_pos == NULL) return true;
188 return use_pos->pos().Value() >
189 pos.NextInstruction().InstructionEnd().Value();
190}
191
192
193InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
194 InstructionOperand* op = NULL;
195 if (HasRegisterAssigned()) {
196 DCHECK(!IsSpilled());
197 switch (Kind()) {
198 case GENERAL_REGISTERS:
199 op = RegisterOperand::Create(assigned_register(), zone);
200 break;
201 case DOUBLE_REGISTERS:
202 op = DoubleRegisterOperand::Create(assigned_register(), zone);
203 break;
204 default:
205 UNREACHABLE();
206 }
207 } else if (IsSpilled()) {
208 DCHECK(!HasRegisterAssigned());
209 op = TopLevel()->GetSpillOperand();
210 DCHECK(!op->IsUnallocated());
211 } else {
212 UnallocatedOperand* unalloc =
213 new (zone) UnallocatedOperand(UnallocatedOperand::NONE);
214 unalloc->set_virtual_register(id_);
215 op = unalloc;
216 }
217 return op;
218}
219
220
221UseInterval* LiveRange::FirstSearchIntervalForPosition(
222 LifetimePosition position) const {
223 if (current_interval_ == NULL) return first_interval_;
224 if (current_interval_->start().Value() > position.Value()) {
225 current_interval_ = NULL;
226 return first_interval_;
227 }
228 return current_interval_;
229}
230
231
232void LiveRange::AdvanceLastProcessedMarker(
233 UseInterval* to_start_of, LifetimePosition but_not_past) const {
234 if (to_start_of == NULL) return;
235 if (to_start_of->start().Value() > but_not_past.Value()) return;
236 LifetimePosition start = current_interval_ == NULL
237 ? LifetimePosition::Invalid()
238 : current_interval_->start();
239 if (to_start_of->start().Value() > start.Value()) {
240 current_interval_ = to_start_of;
241 }
242}
243
244
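// Splits this live range at |position|: all use intervals and use positions at
// or after the split point are moved into |result|, which is linked in as the
// next child in this range's chain.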
245void LiveRange::SplitAt(LifetimePosition position, LiveRange* result,
246 Zone* zone) {
247 DCHECK(Start().Value() < position.Value());
248 DCHECK(result->IsEmpty());
249 // Find the last interval that ends before the position. If the
250 // position is contained in one of the intervals in the chain, we
251 // split that interval and use the first part.
252 UseInterval* current = FirstSearchIntervalForPosition(position);
253
254 // If the split position coincides with the beginning of a use interval
 255 // we need to split use positions in a special way.
256 bool split_at_start = false;
257
258 if (current->start().Value() == position.Value()) {
259 // When splitting at start we need to locate the previous use interval.
260 current = first_interval_;
261 }
262
263 while (current != NULL) {
264 if (current->Contains(position)) {
265 current->SplitAt(position, zone);
266 break;
267 }
268 UseInterval* next = current->next();
269 if (next->start().Value() >= position.Value()) {
270 split_at_start = (next->start().Value() == position.Value());
271 break;
272 }
273 current = next;
274 }
275
 276 // Partition the original use intervals between the two live ranges.
277 UseInterval* before = current;
278 UseInterval* after = before->next();
279 result->last_interval_ =
280 (last_interval_ == before)
281 ? after // Only interval in the range after split.
282 : last_interval_; // Last interval of the original range.
283 result->first_interval_ = after;
284 last_interval_ = before;
285
286 // Find the last use position before the split and the first use
287 // position after it.
288 UsePosition* use_after = first_pos_;
289 UsePosition* use_before = NULL;
290 if (split_at_start) {
291 // The split position coincides with the beginning of a use interval (the
 292 // end of a lifetime hole). The use at this position should be attributed to
 293 // the split child because the split child owns the use interval covering it.
294 while (use_after != NULL && use_after->pos().Value() < position.Value()) {
295 use_before = use_after;
296 use_after = use_after->next();
297 }
298 } else {
299 while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
300 use_before = use_after;
301 use_after = use_after->next();
302 }
303 }
304
 305 // Partition the original use positions between the two live ranges.
306 if (use_before != NULL) {
307 use_before->next_ = NULL;
308 } else {
309 first_pos_ = NULL;
310 }
311 result->first_pos_ = use_after;
312
313 // Discard cached iteration state. It might be pointing
 314 // to a use that no longer belongs to this live range.
315 last_processed_use_ = NULL;
316 current_interval_ = NULL;
317
318 // Link the new live range in the chain before any of the other
319 // ranges linked from the range before the split.
320 result->parent_ = (parent_ == NULL) ? this : parent_;
321 result->kind_ = result->parent_->kind_;
322 result->next_ = next_;
323 next_ = result;
324
325#ifdef DEBUG
326 Verify();
327 result->Verify();
328#endif
329}
330
331
332// This implements an ordering on live ranges so that they are ordered by their
333// start positions. This is needed for the correctness of the register
334// allocation algorithm. If two live ranges start at the same offset then there
335// is a tie breaker based on where the value is first used. This part of the
336// ordering is merely a heuristic.
337bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
338 LifetimePosition start = Start();
339 LifetimePosition other_start = other->Start();
340 if (start.Value() == other_start.Value()) {
341 UsePosition* pos = first_pos();
342 if (pos == NULL) return false;
343 UsePosition* other_pos = other->first_pos();
344 if (other_pos == NULL) return true;
345 return pos->pos().Value() < other_pos->pos().Value();
346 }
347 return start.Value() < other_start.Value();
348}
349
350
351void LiveRange::ShortenTo(LifetimePosition start) {
352 RegisterAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_,
353 start.Value());
354 DCHECK(first_interval_ != NULL);
355 DCHECK(first_interval_->start().Value() <= start.Value());
356 DCHECK(start.Value() < first_interval_->end().Value());
357 first_interval_->set_start(start);
358}
359
360
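// Guarantees that this range covers [start, end), folding any leading
// intervals it overlaps into a single prepended interval. Used for values that
// must stay live across an entire loop.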
361void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end,
362 Zone* zone) {
363 RegisterAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
364 id_, start.Value(), end.Value());
365 LifetimePosition new_end = end;
366 while (first_interval_ != NULL &&
367 first_interval_->start().Value() <= end.Value()) {
368 if (first_interval_->end().Value() > end.Value()) {
369 new_end = first_interval_->end();
370 }
371 first_interval_ = first_interval_->next();
372 }
373
374 UseInterval* new_interval = new (zone) UseInterval(start, new_end);
375 new_interval->next_ = first_interval_;
376 first_interval_ = new_interval;
377 if (new_interval->next() == NULL) {
378 last_interval_ = new_interval;
379 }
380}
381
382
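// Prepends the interval [start, end) to this range. Since instructions are
// processed in reverse order, the new interval either precedes the current
// first interval or overlaps it, in which case the two are merged.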
383void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end,
384 Zone* zone) {
385 RegisterAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n", id_,
386 start.Value(), end.Value());
387 if (first_interval_ == NULL) {
388 UseInterval* interval = new (zone) UseInterval(start, end);
389 first_interval_ = interval;
390 last_interval_ = interval;
391 } else {
392 if (end.Value() == first_interval_->start().Value()) {
393 first_interval_->set_start(start);
394 } else if (end.Value() < first_interval_->start().Value()) {
395 UseInterval* interval = new (zone) UseInterval(start, end);
396 interval->set_next(first_interval_);
397 first_interval_ = interval;
398 } else {
 399 // The order of instruction processing (see ProcessInstructions) guarantees
 400 // that each new use interval either precedes or intersects with the
 401 // last added interval.
402 DCHECK(start.Value() < first_interval_->end().Value());
403 first_interval_->start_ = Min(start, first_interval_->start_);
404 first_interval_->end_ = Max(end, first_interval_->end_);
405 }
406 }
407}
408
409
410void LiveRange::AddUsePosition(LifetimePosition pos,
411 InstructionOperand* operand,
412 InstructionOperand* hint, Zone* zone) {
413 RegisterAllocator::TraceAlloc("Add to live range %d use position %d\n", id_,
414 pos.Value());
415 UsePosition* use_pos = new (zone) UsePosition(pos, operand, hint);
416 UsePosition* prev_hint = NULL;
417 UsePosition* prev = NULL;
418 UsePosition* current = first_pos_;
419 while (current != NULL && current->pos().Value() < pos.Value()) {
420 prev_hint = current->HasHint() ? current : prev_hint;
421 prev = current;
422 current = current->next();
423 }
424
425 if (prev == NULL) {
426 use_pos->set_next(first_pos_);
427 first_pos_ = use_pos;
428 } else {
429 use_pos->next_ = prev->next_;
430 prev->next_ = use_pos;
431 }
432
433 if (prev_hint == NULL && use_pos->HasHint()) {
434 current_hint_operand_ = hint;
435 }
436}
437
438
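// Rewrites the operand of every use position to the location this range ended
// up with: the assigned register or the spill operand.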
439void LiveRange::ConvertOperands(Zone* zone) {
440 InstructionOperand* op = CreateAssignedOperand(zone);
441 UsePosition* use_pos = first_pos();
442 while (use_pos != NULL) {
443 DCHECK(Start().Value() <= use_pos->pos().Value() &&
444 use_pos->pos().Value() <= End().Value());
445
446 if (use_pos->HasOperand()) {
447 DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
448 !use_pos->RequiresRegister());
449 use_pos->operand()->ConvertTo(op->kind(), op->index());
450 }
451 use_pos = use_pos->next();
452 }
453}
454
455
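// CanCover() only checks the overall [Start(), End()) bounds; Covers() below
// also verifies that |position| falls inside one of the use intervals rather
// than in a lifetime hole.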
456bool LiveRange::CanCover(LifetimePosition position) const {
457 if (IsEmpty()) return false;
458 return Start().Value() <= position.Value() &&
459 position.Value() < End().Value();
460}
461
462
463bool LiveRange::Covers(LifetimePosition position) {
464 if (!CanCover(position)) return false;
465 UseInterval* start_search = FirstSearchIntervalForPosition(position);
466 for (UseInterval* interval = start_search; interval != NULL;
467 interval = interval->next()) {
468 DCHECK(interval->next() == NULL ||
469 interval->next()->start().Value() >= interval->start().Value());
470 AdvanceLastProcessedMarker(interval, position);
471 if (interval->Contains(position)) return true;
472 if (interval->start().Value() > position.Value()) return false;
473 }
474 return false;
475}
476
477
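// Returns the first lifetime position at which this range and |other| overlap,
// or an invalid position if their use intervals never intersect.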
478LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
479 UseInterval* b = other->first_interval();
480 if (b == NULL) return LifetimePosition::Invalid();
481 LifetimePosition advance_last_processed_up_to = b->start();
482 UseInterval* a = FirstSearchIntervalForPosition(b->start());
483 while (a != NULL && b != NULL) {
484 if (a->start().Value() > other->End().Value()) break;
485 if (b->start().Value() > End().Value()) break;
486 LifetimePosition cur_intersection = a->Intersect(b);
487 if (cur_intersection.IsValid()) {
488 return cur_intersection;
489 }
490 if (a->start().Value() < b->start().Value()) {
491 a = a->next();
492 if (a == NULL || a->start().Value() > other->End().Value()) break;
493 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
494 } else {
495 b = b->next();
496 }
497 }
498 return LifetimePosition::Invalid();
499}
500
501
502RegisterAllocator::RegisterAllocator(InstructionSequence* code)
503 : zone_(code->isolate()),
504 code_(code),
505 live_in_sets_(code->BasicBlockCount(), zone()),
506 live_ranges_(code->VirtualRegisterCount() * 2, zone()),
507 fixed_live_ranges_(NULL),
508 fixed_double_live_ranges_(NULL),
509 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, zone()),
510 active_live_ranges_(8, zone()),
511 inactive_live_ranges_(8, zone()),
512 reusable_slots_(8, zone()),
513 mode_(UNALLOCATED_REGISTERS),
514 num_registers_(-1),
515 allocation_ok_(true) {}
516
517
518void RegisterAllocator::InitializeLivenessAnalysis() {
519 // Initialize the live_in sets for each block to NULL.
520 int block_count = code()->BasicBlockCount();
521 live_in_sets_.Initialize(block_count, zone());
522 live_in_sets_.AddBlock(NULL, block_count, zone());
523}
524
525
526BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) {
 527 // Compute live out for the given block, not including contributions from
 528 // backward successor edges (their live_in sets are not yet computed).
529 BitVector* live_out =
530 new (zone()) BitVector(code()->VirtualRegisterCount(), zone());
531
532 // Process all successor blocks.
533 BasicBlock::Successors successors = block->successors();
534 for (BasicBlock::Successors::iterator i = successors.begin();
535 i != successors.end(); ++i) {
536 // Add values live on entry to the successor. Note the successor's
537 // live_in will not be computed yet for backwards edges.
538 BasicBlock* successor = *i;
539 BitVector* live_in = live_in_sets_[successor->rpo_number_];
540 if (live_in != NULL) live_out->Union(*live_in);
541
542 // All phi input operands corresponding to this successor edge are live
543 // out from this block.
544 int index = successor->PredecessorIndexOf(block);
545 DCHECK(index >= 0);
546 DCHECK(index < static_cast<int>(successor->PredecessorCount()));
547 for (BasicBlock::const_iterator j = successor->begin();
548 j != successor->end(); ++j) {
549 Node* phi = *j;
550 if (phi->opcode() != IrOpcode::kPhi) continue;
551 Node* input = phi->InputAt(index);
552 live_out->Add(input->id());
553 }
554 }
555
556 return live_out;
557}
558
559
560void RegisterAllocator::AddInitialIntervals(BasicBlock* block,
561 BitVector* live_out) {
562 // Add an interval that includes the entire block to the live range for
563 // each live_out value.
564 LifetimePosition start =
565 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
566 LifetimePosition end = LifetimePosition::FromInstructionIndex(
567 block->last_instruction_index()).NextInstruction();
568 BitVector::Iterator iterator(live_out);
569 while (!iterator.Done()) {
570 int operand_index = iterator.Current();
571 LiveRange* range = LiveRangeFor(operand_index);
572 range->AddUseInterval(start, end, zone());
573 iterator.Advance();
574 }
575}
576
577
578int RegisterAllocator::FixedDoubleLiveRangeID(int index) {
579 return -index - 1 - Register::kMaxNumAllocatableRegisters;
580}
581
582
583InstructionOperand* RegisterAllocator::AllocateFixed(
584 UnallocatedOperand* operand, int pos, bool is_tagged) {
585 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
586 DCHECK(operand->HasFixedPolicy());
587 if (operand->HasFixedSlotPolicy()) {
588 operand->ConvertTo(InstructionOperand::STACK_SLOT,
589 operand->fixed_slot_index());
590 } else if (operand->HasFixedRegisterPolicy()) {
591 int reg_index = operand->fixed_register_index();
592 operand->ConvertTo(InstructionOperand::REGISTER, reg_index);
593 } else if (operand->HasFixedDoubleRegisterPolicy()) {
594 int reg_index = operand->fixed_register_index();
595 operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index);
596 } else {
597 UNREACHABLE();
598 }
599 if (is_tagged) {
600 TraceAlloc("Fixed reg is tagged at %d\n", pos);
601 Instruction* instr = InstructionAt(pos);
602 if (instr->HasPointerMap()) {
603 instr->pointer_map()->RecordPointer(operand, code_zone());
604 }
605 }
606 return operand;
607}
608
609
610LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) {
611 DCHECK(index < Register::kMaxNumAllocatableRegisters);
612 LiveRange* result = fixed_live_ranges_[index];
613 if (result == NULL) {
 614 // TODO(titzer): add a utility method to allocate a new LiveRange:
 615 // The LiveRange object itself can go in this zone, but the
 616 // InstructionOperand needs to go in the code zone, since it may survive
 617 // register allocation.
618 result = new (zone()) LiveRange(FixedLiveRangeID(index), code_zone());
619 DCHECK(result->IsFixed());
620 result->kind_ = GENERAL_REGISTERS;
621 SetLiveRangeAssignedRegister(result, index);
622 fixed_live_ranges_[index] = result;
623 }
624 return result;
625}
626
627
628LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) {
629 DCHECK(index < DoubleRegister::NumAllocatableRegisters());
630 LiveRange* result = fixed_double_live_ranges_[index];
631 if (result == NULL) {
632 result = new (zone()) LiveRange(FixedDoubleLiveRangeID(index), code_zone());
633 DCHECK(result->IsFixed());
634 result->kind_ = DOUBLE_REGISTERS;
635 SetLiveRangeAssignedRegister(result, index);
636 fixed_double_live_ranges_[index] = result;
637 }
638 return result;
639}
640
641
642LiveRange* RegisterAllocator::LiveRangeFor(int index) {
643 if (index >= live_ranges_.length()) {
644 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
645 }
646 LiveRange* result = live_ranges_[index];
647 if (result == NULL) {
648 result = new (zone()) LiveRange(index, code_zone());
649 live_ranges_[index] = result;
650 }
651 return result;
652}
653
654
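// Returns the gap instruction directly preceding the last instruction of
// |block|.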
655GapInstruction* RegisterAllocator::GetLastGap(BasicBlock* block) {
656 int last_instruction = block->last_instruction_index();
657 return code()->GapAt(last_instruction - 1);
658}
659
660
661LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
662 if (operand->IsUnallocated()) {
663 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
664 } else if (operand->IsRegister()) {
665 return FixedLiveRangeFor(operand->index());
666 } else if (operand->IsDoubleRegister()) {
667 return FixedDoubleLiveRangeFor(operand->index());
668 } else {
669 return NULL;
670 }
671}
672
673
674void RegisterAllocator::Define(LifetimePosition position,
675 InstructionOperand* operand,
676 InstructionOperand* hint) {
677 LiveRange* range = LiveRangeFor(operand);
678 if (range == NULL) return;
679
680 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
 681 // Can happen if there is a definition without a use.
682 range->AddUseInterval(position, position.NextInstruction(), zone());
683 range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
684 } else {
685 range->ShortenTo(position);
686 }
687
688 if (operand->IsUnallocated()) {
689 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
690 range->AddUsePosition(position, unalloc_operand, hint, zone());
691 }
692}
693
694
695void RegisterAllocator::Use(LifetimePosition block_start,
696 LifetimePosition position,
697 InstructionOperand* operand,
698 InstructionOperand* hint) {
699 LiveRange* range = LiveRangeFor(operand);
700 if (range == NULL) return;
701 if (operand->IsUnallocated()) {
702 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
703 range->AddUsePosition(position, unalloc_operand, hint, zone());
704 }
705 range->AddUseInterval(block_start, position, zone());
706}
707
708
709void RegisterAllocator::AddConstraintsGapMove(int index,
710 InstructionOperand* from,
711 InstructionOperand* to) {
712 GapInstruction* gap = code()->GapAt(index);
713 ParallelMove* move =
714 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
715 if (from->IsUnallocated()) {
716 const ZoneList<MoveOperands>* move_operands = move->move_operands();
717 for (int i = 0; i < move_operands->length(); ++i) {
718 MoveOperands cur = move_operands->at(i);
719 InstructionOperand* cur_to = cur.destination();
720 if (cur_to->IsUnallocated()) {
721 if (UnallocatedOperand::cast(cur_to)->virtual_register() ==
722 UnallocatedOperand::cast(from)->virtual_register()) {
723 move->AddMove(cur.source(), to, code_zone());
724 return;
725 }
726 }
727 }
728 }
729 move->AddMove(from, to, code_zone());
730}
731
732
733void RegisterAllocator::MeetRegisterConstraints(BasicBlock* block) {
734 int start = block->first_instruction_index();
735 int end = block->last_instruction_index();
736 DCHECK_NE(-1, start);
737 for (int i = start; i <= end; ++i) {
738 if (code()->IsGapAt(i)) {
739 Instruction* instr = NULL;
740 Instruction* prev_instr = NULL;
741 if (i < end) instr = InstructionAt(i + 1);
742 if (i > start) prev_instr = InstructionAt(i - 1);
743 MeetConstraintsBetween(prev_instr, instr, i);
744 if (!AllocationOk()) return;
745 }
746 }
747
 748 // Meet register constraints for the instruction at the end of the block.
749 if (!code()->IsGapAt(end)) {
750 MeetRegisterConstraintsForLastInstructionInBlock(block);
751 }
752}
753
754
755void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock(
756 BasicBlock* block) {
757 int end = block->last_instruction_index();
758 Instruction* last_instruction = InstructionAt(end);
759 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
760 InstructionOperand* output_operand = last_instruction->OutputAt(i);
761 DCHECK(!output_operand->IsConstant());
762 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
763 int output_vreg = output->virtual_register();
764 LiveRange* range = LiveRangeFor(output_vreg);
765 bool assigned = false;
766 if (output->HasFixedPolicy()) {
767 AllocateFixed(output, -1, false);
 768 // This value is produced on the stack; we never need to spill it.
769 if (output->IsStackSlot()) {
770 range->SetSpillOperand(output);
771 range->SetSpillStartIndex(end);
772 assigned = true;
773 }
774
775 BasicBlock::Successors successors = block->successors();
776 for (BasicBlock::Successors::iterator succ = successors.begin();
777 succ != successors.end(); ++succ) {
778 DCHECK((*succ)->PredecessorCount() == 1);
779 int gap_index = (*succ)->first_instruction_index() + 1;
780 DCHECK(code()->IsGapAt(gap_index));
781
782 // Create an unconstrained operand for the same virtual register
783 // and insert a gap move from the fixed output to the operand.
784 UnallocatedOperand* output_copy =
785 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
786 output_copy->set_virtual_register(output_vreg);
787
788 code()->AddGapMove(gap_index, output, output_copy);
789 }
790 }
791
792 if (!assigned) {
793 BasicBlock::Successors successors = block->successors();
794 for (BasicBlock::Successors::iterator succ = successors.begin();
795 succ != successors.end(); ++succ) {
796 DCHECK((*succ)->PredecessorCount() == 1);
797 int gap_index = (*succ)->first_instruction_index() + 1;
798 range->SetSpillStartIndex(gap_index);
799
800 // This move to spill operand is not a real use. Liveness analysis
801 // and splitting of live ranges do not account for it.
 802 // Thus it should be inserted at a lifetime position corresponding to
803 // the instruction end.
804 GapInstruction* gap = code()->GapAt(gap_index);
805 ParallelMove* move =
806 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
807 move->AddMove(output, range->GetSpillOperand(), code_zone());
808 }
809 }
810 }
811}
812
813
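// Resolves fixed-register, fixed-slot and same-as-input constraints for the
// instructions on either side of the gap at |gap_index| (|first| precedes the
// gap, |second| follows it), inserting gap moves and spill moves as needed.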
814void RegisterAllocator::MeetConstraintsBetween(Instruction* first,
815 Instruction* second,
816 int gap_index) {
817 if (first != NULL) {
818 // Handle fixed temporaries.
819 for (size_t i = 0; i < first->TempCount(); i++) {
820 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
821 if (temp->HasFixedPolicy()) {
822 AllocateFixed(temp, gap_index - 1, false);
823 }
824 }
825
826 // Handle constant/fixed output operands.
827 for (size_t i = 0; i < first->OutputCount(); i++) {
828 InstructionOperand* output = first->OutputAt(i);
829 if (output->IsConstant()) {
830 int output_vreg = output->index();
831 LiveRange* range = LiveRangeFor(output_vreg);
832 range->SetSpillStartIndex(gap_index - 1);
833 range->SetSpillOperand(output);
834 } else {
835 UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
836 LiveRange* range = LiveRangeFor(first_output->virtual_register());
837 bool assigned = false;
838 if (first_output->HasFixedPolicy()) {
839 UnallocatedOperand* output_copy =
840 first_output->CopyUnconstrained(code_zone());
841 bool is_tagged = HasTaggedValue(first_output->virtual_register());
842 AllocateFixed(first_output, gap_index, is_tagged);
843
 844 // This value is produced on the stack; we never need to spill it.
845 if (first_output->IsStackSlot()) {
846 range->SetSpillOperand(first_output);
847 range->SetSpillStartIndex(gap_index - 1);
848 assigned = true;
849 }
850 code()->AddGapMove(gap_index, first_output, output_copy);
851 }
852
853 // Make sure we add a gap move for spilling (if we have not done
854 // so already).
855 if (!assigned) {
856 range->SetSpillStartIndex(gap_index);
857
858 // This move to spill operand is not a real use. Liveness analysis
859 // and splitting of live ranges do not account for it.
 860 // Thus it should be inserted at a lifetime position corresponding to
861 // the instruction end.
862 GapInstruction* gap = code()->GapAt(gap_index);
863 ParallelMove* move =
864 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
865 move->AddMove(first_output, range->GetSpillOperand(), code_zone());
866 }
867 }
868 }
869 }
870
871 if (second != NULL) {
872 // Handle fixed input operands of second instruction.
873 for (size_t i = 0; i < second->InputCount(); i++) {
874 InstructionOperand* input = second->InputAt(i);
875 if (input->IsImmediate()) continue; // Ignore immediates.
876 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
877 if (cur_input->HasFixedPolicy()) {
878 UnallocatedOperand* input_copy =
879 cur_input->CopyUnconstrained(code_zone());
880 bool is_tagged = HasTaggedValue(cur_input->virtual_register());
881 AllocateFixed(cur_input, gap_index + 1, is_tagged);
882 AddConstraintsGapMove(gap_index, input_copy, cur_input);
883 }
884 }
885
886 // Handle "output same as input" for second instruction.
887 for (size_t i = 0; i < second->OutputCount(); i++) {
888 InstructionOperand* output = second->OutputAt(i);
889 if (!output->IsUnallocated()) continue;
890 UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
891 if (second_output->HasSameAsInputPolicy()) {
892 DCHECK(i == 0); // Only valid for first output.
893 UnallocatedOperand* cur_input =
894 UnallocatedOperand::cast(second->InputAt(0));
895 int output_vreg = second_output->virtual_register();
896 int input_vreg = cur_input->virtual_register();
897
898 UnallocatedOperand* input_copy =
899 cur_input->CopyUnconstrained(code_zone());
900 cur_input->set_virtual_register(second_output->virtual_register());
901 AddConstraintsGapMove(gap_index, input_copy, cur_input);
902
903 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
904 int index = gap_index + 1;
905 Instruction* instr = InstructionAt(index);
906 if (instr->HasPointerMap()) {
907 instr->pointer_map()->RecordPointer(input_copy, code_zone());
908 }
909 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
910 // The input is assumed to immediately have a tagged representation,
911 // before the pointer map can be used. I.e. the pointer map at the
912 // instruction will include the output operand (whose value at the
913 // beginning of the instruction is equal to the input operand). If
914 // this is not desired, then the pointer map at this instruction needs
915 // to be adjusted manually.
916 }
917 }
918 }
919 }
920}
921
922
923bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) {
924 for (size_t i = 0; i < instr->OutputCount(); i++) {
925 InstructionOperand* output = instr->OutputAt(i);
926 if (output->IsRegister() && output->index() == index) return true;
927 }
928 return false;
929}
930
931
932bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr,
933 int index) {
934 for (size_t i = 0; i < instr->OutputCount(); i++) {
935 InstructionOperand* output = instr->OutputAt(i);
936 if (output->IsDoubleRegister() && output->index() == index) return true;
937 }
938 return false;
939}
940
941
942void RegisterAllocator::ProcessInstructions(BasicBlock* block,
943 BitVector* live) {
944 int block_start = block->first_instruction_index();
945
946 LifetimePosition block_start_position =
947 LifetimePosition::FromInstructionIndex(block_start);
948
949 for (int index = block->last_instruction_index(); index >= block_start;
950 index--) {
951 LifetimePosition curr_position =
952 LifetimePosition::FromInstructionIndex(index);
953
954 Instruction* instr = InstructionAt(index);
955 DCHECK(instr != NULL);
956 if (instr->IsGapMoves()) {
957 // Process the moves of the gap instruction, making their sources live.
958 GapInstruction* gap = code()->GapAt(index);
959
960 // TODO(titzer): no need to create the parallel move if it doesn't exist.
961 ParallelMove* move =
962 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
963 const ZoneList<MoveOperands>* move_operands = move->move_operands();
964 for (int i = 0; i < move_operands->length(); ++i) {
965 MoveOperands* cur = &move_operands->at(i);
966 if (cur->IsIgnored()) continue;
967 InstructionOperand* from = cur->source();
968 InstructionOperand* to = cur->destination();
969 InstructionOperand* hint = to;
970 if (to->IsUnallocated()) {
971 int to_vreg = UnallocatedOperand::cast(to)->virtual_register();
972 LiveRange* to_range = LiveRangeFor(to_vreg);
973 if (to_range->is_phi()) {
974 if (to_range->is_non_loop_phi()) {
975 hint = to_range->current_hint_operand();
976 }
977 } else {
978 if (live->Contains(to_vreg)) {
979 Define(curr_position, to, from);
980 live->Remove(to_vreg);
981 } else {
982 cur->Eliminate();
983 continue;
984 }
985 }
986 } else {
987 Define(curr_position, to, from);
988 }
989 Use(block_start_position, curr_position, from, hint);
990 if (from->IsUnallocated()) {
991 live->Add(UnallocatedOperand::cast(from)->virtual_register());
992 }
993 }
994 } else {
995 // Process output, inputs, and temps of this non-gap instruction.
996 for (size_t i = 0; i < instr->OutputCount(); i++) {
997 InstructionOperand* output = instr->OutputAt(i);
998 if (output->IsUnallocated()) {
999 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
1000 live->Remove(out_vreg);
1001 } else if (output->IsConstant()) {
1002 int out_vreg = output->index();
1003 live->Remove(out_vreg);
1004 }
1005 Define(curr_position, output, NULL);
1006 }
1007
1008 if (instr->ClobbersRegisters()) {
1009 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
1010 if (!IsOutputRegisterOf(instr, i)) {
1011 LiveRange* range = FixedLiveRangeFor(i);
1012 range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
1013 zone());
1014 }
1015 }
1016 }
1017
1018 if (instr->ClobbersDoubleRegisters()) {
1019 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
1020 if (!IsOutputDoubleRegisterOf(instr, i)) {
1021 LiveRange* range = FixedDoubleLiveRangeFor(i);
1022 range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
1023 zone());
1024 }
1025 }
1026 }
1027
1028 for (size_t i = 0; i < instr->InputCount(); i++) {
1029 InstructionOperand* input = instr->InputAt(i);
1030 if (input->IsImmediate()) continue; // Ignore immediates.
1031 LifetimePosition use_pos;
1032 if (input->IsUnallocated() &&
1033 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
1034 use_pos = curr_position;
1035 } else {
1036 use_pos = curr_position.InstructionEnd();
1037 }
1038
1039 Use(block_start_position, use_pos, input, NULL);
1040 if (input->IsUnallocated()) {
1041 live->Add(UnallocatedOperand::cast(input)->virtual_register());
1042 }
1043 }
1044
1045 for (size_t i = 0; i < instr->TempCount(); i++) {
1046 InstructionOperand* temp = instr->TempAt(i);
1047 if (instr->ClobbersTemps()) {
1048 if (temp->IsRegister()) continue;
1049 if (temp->IsUnallocated()) {
1050 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
1051 if (temp_unalloc->HasFixedPolicy()) {
1052 continue;
1053 }
1054 }
1055 }
1056 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1057 Define(curr_position, temp, NULL);
1058 }
1059 }
1060 }
1061}
1062
1063
1064void RegisterAllocator::ResolvePhis(BasicBlock* block) {
1065 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) {
1066 Node* phi = *i;
1067 if (phi->opcode() != IrOpcode::kPhi) continue;
1068
1069 UnallocatedOperand* phi_operand =
1070 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE);
1071 phi_operand->set_virtual_register(phi->id());
1072
1073 int j = 0;
1074 Node::Inputs inputs = phi->inputs();
1075 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end();
1076 ++iter, ++j) {
1077 Node* op = *iter;
1078 // TODO(mstarzinger): Use a ValueInputIterator instead.
1079 if (j >= block->PredecessorCount()) continue;
1080 UnallocatedOperand* operand =
1081 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
1082 operand->set_virtual_register(op->id());
1083 BasicBlock* cur_block = block->PredecessorAt(j);
1084 // The gap move must be added without any special processing as in
1085 // the AddConstraintsGapMove.
1086 code()->AddGapMove(cur_block->last_instruction_index() - 1, operand,
1087 phi_operand);
1088
1089 Instruction* branch = InstructionAt(cur_block->last_instruction_index());
1090 DCHECK(!branch->HasPointerMap());
1091 USE(branch);
1092 }
1093
1094 LiveRange* live_range = LiveRangeFor(phi->id());
1095 BlockStartInstruction* block_start = code()->GetBlockStart(block);
1096 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1097 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone());
1098 live_range->SetSpillStartIndex(block->first_instruction_index());
1099
1100 // We use the phi-ness of some nodes in some later heuristics.
1101 live_range->set_is_phi(true);
1102 if (!block->IsLoopHeader()) {
1103 live_range->set_is_non_loop_phi(true);
1104 }
1105 }
1106}
1107
1108
1109bool RegisterAllocator::Allocate() {
1110 assigned_registers_ = new (code_zone())
1111 BitVector(Register::NumAllocatableRegisters(), code_zone());
1112 assigned_double_registers_ = new (code_zone())
1113 BitVector(DoubleRegister::NumAllocatableRegisters(), code_zone());
1114 MeetRegisterConstraints();
1115 if (!AllocationOk()) return false;
1116 ResolvePhis();
1117 BuildLiveRanges();
1118 AllocateGeneralRegisters();
1119 if (!AllocationOk()) return false;
1120 AllocateDoubleRegisters();
1121 if (!AllocationOk()) return false;
1122 PopulatePointerMaps();
1123 ConnectRanges();
1124 ResolveControlFlow();
1125 code()->frame()->SetAllocatedRegisters(assigned_registers_);
1126 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1127 return true;
1128}
1129
1130
1131void RegisterAllocator::MeetRegisterConstraints() {
1132 RegisterAllocatorPhase phase("L_Register constraints", this);
1133 for (int i = 0; i < code()->BasicBlockCount(); ++i) {
1134 MeetRegisterConstraints(code()->BlockAt(i));
1135 if (!AllocationOk()) return;
1136 }
1137}
1138
1139
1140void RegisterAllocator::ResolvePhis() {
1141 RegisterAllocatorPhase phase("L_Resolve phis", this);
1142
1143 // Process the blocks in reverse order.
1144 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) {
1145 ResolvePhis(code()->BlockAt(i));
1146 }
1147}
1148
1149
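// Finds the children of |range| that cover the end of |pred| and the start of
// |block|; if they were assigned different locations, inserts a connecting
// move at the block boundary.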
1150void RegisterAllocator::ResolveControlFlow(LiveRange* range, BasicBlock* block,
1151 BasicBlock* pred) {
1152 LifetimePosition pred_end =
1153 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1154 LifetimePosition cur_start =
1155 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1156 LiveRange* pred_cover = NULL;
1157 LiveRange* cur_cover = NULL;
1158 LiveRange* cur_range = range;
1159 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1160 if (cur_range->CanCover(cur_start)) {
1161 DCHECK(cur_cover == NULL);
1162 cur_cover = cur_range;
1163 }
1164 if (cur_range->CanCover(pred_end)) {
1165 DCHECK(pred_cover == NULL);
1166 pred_cover = cur_range;
1167 }
1168 cur_range = cur_range->next();
1169 }
1170
1171 if (cur_cover->IsSpilled()) return;
1172 DCHECK(pred_cover != NULL && cur_cover != NULL);
1173 if (pred_cover != cur_cover) {
1174 InstructionOperand* pred_op =
1175 pred_cover->CreateAssignedOperand(code_zone());
1176 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
1177 if (!pred_op->Equals(cur_op)) {
1178 GapInstruction* gap = NULL;
1179 if (block->PredecessorCount() == 1) {
1180 gap = code()->GapAt(block->first_instruction_index());
1181 } else {
1182 DCHECK(pred->SuccessorCount() == 1);
1183 gap = GetLastGap(pred);
1184
1185 Instruction* branch = InstructionAt(pred->last_instruction_index());
1186 DCHECK(!branch->HasPointerMap());
1187 USE(branch);
1188 }
1189 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1190 ->AddMove(pred_op, cur_op, code_zone());
1191 }
1192 }
1193}
1194
1195
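// Returns the parallel move into which a move connecting two parts of a live
// range at |pos| should be inserted: the gap at |pos| itself if there is one,
// otherwise the appropriate slot of the neighbouring gap.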
1196ParallelMove* RegisterAllocator::GetConnectingParallelMove(
1197 LifetimePosition pos) {
1198 int index = pos.InstructionIndex();
1199 if (code()->IsGapAt(index)) {
1200 GapInstruction* gap = code()->GapAt(index);
1201 return gap->GetOrCreateParallelMove(
1202 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END,
1203 code_zone());
1204 }
1205 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1206 return code()->GapAt(gap_pos)->GetOrCreateParallelMove(
1207 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE,
1208 code_zone());
1209}
1210
1211
1212BasicBlock* RegisterAllocator::GetBlock(LifetimePosition pos) {
1213 return code()->GetBasicBlock(pos.InstructionIndex());
1214}
1215
1216
1217void RegisterAllocator::ConnectRanges() {
1218 RegisterAllocatorPhase phase("L_Connect ranges", this);
1219 for (int i = 0; i < live_ranges()->length(); ++i) {
1220 LiveRange* first_range = live_ranges()->at(i);
1221 if (first_range == NULL || first_range->parent() != NULL) continue;
1222
1223 LiveRange* second_range = first_range->next();
1224 while (second_range != NULL) {
1225 LifetimePosition pos = second_range->Start();
1226
1227 if (!second_range->IsSpilled()) {
1228 // Add gap move if the two live ranges touch and there is no block
1229 // boundary.
1230 if (first_range->End().Value() == pos.Value()) {
1231 bool should_insert = true;
1232 if (IsBlockBoundary(pos)) {
1233 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1234 }
1235 if (should_insert) {
1236 ParallelMove* move = GetConnectingParallelMove(pos);
1237 InstructionOperand* prev_operand =
1238 first_range->CreateAssignedOperand(code_zone());
1239 InstructionOperand* cur_operand =
1240 second_range->CreateAssignedOperand(code_zone());
1241 move->AddMove(prev_operand, cur_operand, code_zone());
1242 }
1243 }
1244 }
1245
1246 first_range = second_range;
1247 second_range = second_range->next();
1248 }
1249 }
1250}
1251
1252
1253bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const {
1254 if (block->PredecessorCount() != 1) return false;
1255 return block->PredecessorAt(0)->rpo_number_ == block->rpo_number_ - 1;
1256}
1257
1258
1259void RegisterAllocator::ResolveControlFlow() {
1260 RegisterAllocatorPhase phase("L_Resolve control flow", this);
1261 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) {
1262 BasicBlock* block = code()->BlockAt(block_id);
1263 if (CanEagerlyResolveControlFlow(block)) continue;
1264 BitVector* live = live_in_sets_[block->rpo_number_];
1265 BitVector::Iterator iterator(live);
1266 while (!iterator.Done()) {
1267 int operand_index = iterator.Current();
1268 BasicBlock::Predecessors predecessors = block->predecessors();
1269 for (BasicBlock::Predecessors::iterator i = predecessors.begin();
1270 i != predecessors.end(); ++i) {
1271 BasicBlock* cur = *i;
1272 LiveRange* cur_range = LiveRangeFor(operand_index);
1273 ResolveControlFlow(cur_range, block, cur);
1274 }
1275 iterator.Advance();
1276 }
1277 }
1278}
1279
1280
1281void RegisterAllocator::BuildLiveRanges() {
1282 RegisterAllocatorPhase phase("L_Build live ranges", this);
1283 InitializeLivenessAnalysis();
1284 // Process the blocks in reverse order.
1285 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0;
1286 --block_id) {
1287 BasicBlock* block = code()->BlockAt(block_id);
1288 BitVector* live = ComputeLiveOut(block);
1289 // Initially consider all live_out values live for the entire block. We
1290 // will shorten these intervals if necessary.
1291 AddInitialIntervals(block, live);
1292
1293 // Process the instructions in reverse order, generating and killing
1294 // live values.
1295 ProcessInstructions(block, live);
1296 // All phi output operands are killed by this block.
1297 for (BasicBlock::const_iterator i = block->begin(); i != block->end();
1298 ++i) {
1299 Node* phi = *i;
1300 if (phi->opcode() != IrOpcode::kPhi) continue;
1301
1302 // The live range interval already ends at the first instruction of the
1303 // block.
1304 live->Remove(phi->id());
1305
1306 InstructionOperand* hint = NULL;
1307 InstructionOperand* phi_operand = NULL;
1308 GapInstruction* gap = GetLastGap(block->PredecessorAt(0));
1309
 1310 // TODO(titzer): no need to create the parallel move if it doesn't exist.
1311 ParallelMove* move =
1312 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
1313 for (int j = 0; j < move->move_operands()->length(); ++j) {
1314 InstructionOperand* to = move->move_operands()->at(j).destination();
1315 if (to->IsUnallocated() &&
1316 UnallocatedOperand::cast(to)->virtual_register() == phi->id()) {
1317 hint = move->move_operands()->at(j).source();
1318 phi_operand = to;
1319 break;
1320 }
1321 }
1322 DCHECK(hint != NULL);
1323
1324 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1325 block->first_instruction_index());
1326 Define(block_start, phi_operand, hint);
1327 }
1328
 1329 // Now live is the live_in set for this block, except that it does not
 1330 // include values that are live only on backward successor edges.
1331 live_in_sets_[block_id] = live;
1332
1333 if (block->IsLoopHeader()) {
1334 // Add a live range stretching from the first loop instruction to the last
1335 // for each value live on entry to the header.
1336 BitVector::Iterator iterator(live);
1337 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1338 block->first_instruction_index());
1339 int end_index =
1340 code()->BlockAt(block->loop_end_)->last_instruction_index();
1341 LifetimePosition end =
1342 LifetimePosition::FromInstructionIndex(end_index).NextInstruction();
1343 while (!iterator.Done()) {
1344 int operand_index = iterator.Current();
1345 LiveRange* range = LiveRangeFor(operand_index);
1346 range->EnsureInterval(start, end, zone());
1347 iterator.Advance();
1348 }
1349
1350 // Insert all values into the live in sets of all blocks in the loop.
1351 for (int i = block->rpo_number_ + 1; i < block->loop_end_; ++i) {
1352 live_in_sets_[i]->Union(*live);
1353 }
1354 }
1355
1356#ifdef DEBUG
1357 if (block_id == 0) {
1358 BitVector::Iterator iterator(live);
1359 bool found = false;
1360 while (!iterator.Done()) {
1361 found = true;
1362 int operand_index = iterator.Current();
1363 PrintF("Register allocator error: live v%d reached first block.\n",
1364 operand_index);
1365 LiveRange* range = LiveRangeFor(operand_index);
1366 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value());
1367 CompilationInfo* info = code()->linkage()->info();
1368 if (info->IsStub()) {
1369 if (info->code_stub() == NULL) {
1370 PrintF("\n");
1371 } else {
1372 CodeStub::Major major_key = info->code_stub()->MajorKey();
1373 PrintF(" (function: %s)\n", CodeStub::MajorName(major_key, false));
1374 }
1375 } else {
1376 DCHECK(info->IsOptimizing());
1377 AllowHandleDereference allow_deref;
1378 PrintF(" (function: %s)\n",
1379 info->function()->debug_name()->ToCString().get());
1380 }
1381 iterator.Advance();
1382 }
1383 DCHECK(!found);
1384 }
1385#endif
1386 }
1387
1388 for (int i = 0; i < live_ranges_.length(); ++i) {
1389 if (live_ranges_[i] != NULL) {
1390 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1391
1392 // TODO(bmeurer): This is a horrible hack to make sure that for constant
1393 // live ranges, every use requires the constant to be in a register.
1394 // Without this hack, all uses with "any" policy would get the constant
1395 // operand assigned.
1396 LiveRange* range = live_ranges_[i];
1397 if (range->HasAllocatedSpillOperand() &&
1398 range->GetSpillOperand()->IsConstant()) {
1399 for (UsePosition* pos = range->first_pos(); pos != NULL;
1400 pos = pos->next_) {
1401 pos->register_beneficial_ = true;
1402 pos->requires_reg_ = true;
1403 }
1404 }
1405 }
1406 }
1407}
1408
1409
1410bool RegisterAllocator::SafePointsAreInOrder() const {
1411 int safe_point = 0;
1412 const PointerMapDeque* pointer_maps = code()->pointer_maps();
1413 for (PointerMapDeque::const_iterator it = pointer_maps->begin();
1414 it != pointer_maps->end(); ++it) {
1415 PointerMap* map = *it;
1416 if (safe_point > map->instruction_position()) return false;
1417 safe_point = map->instruction_position();
1418 }
1419 return true;
1420}
1421
1422
1423void RegisterAllocator::PopulatePointerMaps() {
1424 RegisterAllocatorPhase phase("L_Populate pointer maps", this);
1425
1426 DCHECK(SafePointsAreInOrder());
1427
1428 // Iterate over all safe point positions and record a pointer
1429 // for all spilled live ranges at this point.
1430 int last_range_start = 0;
1431 const PointerMapDeque* pointer_maps = code()->pointer_maps();
1432 PointerMapDeque::const_iterator first_it = pointer_maps->begin();
1433 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1434 LiveRange* range = live_ranges()->at(range_idx);
1435 if (range == NULL) continue;
1436 // Iterate over the first parts of multi-part live ranges.
1437 if (range->parent() != NULL) continue;
1438 // Skip non-reference values.
1439 if (!HasTaggedValue(range->id())) continue;
1440 // Skip empty live ranges.
1441 if (range->IsEmpty()) continue;
1442
1443 // Find the extent of the range and its children.
1444 int start = range->Start().InstructionIndex();
1445 int end = 0;
1446 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1447 LifetimePosition this_end = cur->End();
1448 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1449 DCHECK(cur->Start().InstructionIndex() >= start);
1450 }
1451
1452 // Most of the ranges are in order, but not all. Keep an eye on when they
1453 // step backwards and reset the first_it so we don't miss any safe points.
1454 if (start < last_range_start) first_it = pointer_maps->begin();
1455 last_range_start = start;
1456
1457 // Step across all the safe points that are before the start of this range,
1458 // recording how far we step in order to save doing this for the next range.
1459 for (; first_it != pointer_maps->end(); ++first_it) {
1460 PointerMap* map = *first_it;
1461 if (map->instruction_position() >= start) break;
1462 }
1463
1464 // Step through the safe points to see whether they are in the range.
1465 for (PointerMapDeque::const_iterator it = first_it;
1466 it != pointer_maps->end(); ++it) {
1467 PointerMap* map = *it;
1468 int safe_point = map->instruction_position();
1469
1470 // The safe points are sorted so we can stop searching here.
1471 if (safe_point - 1 > end) break;
1472
1473 // Advance to the next active range that covers the current
1474 // safe point position.
1475 LifetimePosition safe_point_pos =
1476 LifetimePosition::FromInstructionIndex(safe_point);
1477 LiveRange* cur = range;
1478 while (cur != NULL && !cur->Covers(safe_point_pos)) {
1479 cur = cur->next();
1480 }
1481 if (cur == NULL) continue;
1482
1483 // Check if the live range is spilled and the safe point is after
1484 // the spill position.
1485 if (range->HasAllocatedSpillOperand() &&
1486 safe_point >= range->spill_start_index() &&
1487 !range->GetSpillOperand()->IsConstant()) {
1488 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1489 range->id(), range->spill_start_index(), safe_point);
1490 map->RecordPointer(range->GetSpillOperand(), code_zone());
1491 }
1492
1493 if (!cur->IsSpilled()) {
1494 TraceAlloc(
1495 "Pointer in register for range %d (start at %d) "
1496 "at safe point %d\n",
1497 cur->id(), cur->Start().Value(), safe_point);
1498 InstructionOperand* operand = cur->CreateAssignedOperand(code_zone());
1499 DCHECK(!operand->IsStackSlot());
1500 map->RecordPointer(operand, code_zone());
1501 }
1502 }
1503 }
1504}
1505
1506
1507void RegisterAllocator::AllocateGeneralRegisters() {
1508 RegisterAllocatorPhase phase("L_Allocate general registers", this);
1509 num_registers_ = Register::NumAllocatableRegisters();
1510 mode_ = GENERAL_REGISTERS;
1511 AllocateRegisters();
1512}
1513
1514
1515void RegisterAllocator::AllocateDoubleRegisters() {
1516 RegisterAllocatorPhase phase("L_Allocate double registers", this);
1517 num_registers_ = DoubleRegister::NumAllocatableRegisters();
1518 mode_ = DOUBLE_REGISTERS;
1519 AllocateRegisters();
1520}
1521
1522
1523void RegisterAllocator::AllocateRegisters() {
1524 DCHECK(unhandled_live_ranges_.is_empty());
1525
1526 for (int i = 0; i < live_ranges_.length(); ++i) {
1527 if (live_ranges_[i] != NULL) {
1528 if (live_ranges_[i]->Kind() == mode_) {
1529 AddToUnhandledUnsorted(live_ranges_[i]);
1530 }
1531 }
1532 }
1533 SortUnhandled();
1534 DCHECK(UnhandledIsSorted());
1535
1536 DCHECK(reusable_slots_.is_empty());
1537 DCHECK(active_live_ranges_.is_empty());
1538 DCHECK(inactive_live_ranges_.is_empty());
1539
1540 if (mode_ == DOUBLE_REGISTERS) {
1541 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
1542 LiveRange* current = fixed_double_live_ranges_.at(i);
1543 if (current != NULL) {
1544 AddToInactive(current);
1545 }
1546 }
1547 } else {
1548 DCHECK(mode_ == GENERAL_REGISTERS);
1549 for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1550 LiveRange* current = fixed_live_ranges_.at(i);
1551 if (current != NULL) {
1552 AddToInactive(current);
1553 }
1554 }
1555 }
1556
1557 while (!unhandled_live_ranges_.is_empty()) {
1558 DCHECK(UnhandledIsSorted());
1559 LiveRange* current = unhandled_live_ranges_.RemoveLast();
1560 DCHECK(UnhandledIsSorted());
1561 LifetimePosition position = current->Start();
1562#ifdef DEBUG
1563 allocation_finger_ = position;
1564#endif
1565 TraceAlloc("Processing interval %d start=%d\n", current->id(),
1566 position.Value());
1567
1568 if (current->HasAllocatedSpillOperand()) {
1569 TraceAlloc("Live range %d already has a spill operand\n", current->id());
1570 LifetimePosition next_pos = position;
1571 if (code()->IsGapAt(next_pos.InstructionIndex())) {
1572 next_pos = next_pos.NextInstruction();
1573 }
1574 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1575 // If the range already has a spill operand and it doesn't need a
1576 // register immediately, split it and spill the first part of the range.
1577 if (pos == NULL) {
1578 Spill(current);
1579 continue;
1580 } else if (pos->pos().Value() >
1581 current->Start().NextInstruction().Value()) {
 1582 // Do not spill the live range eagerly if the use position that can benefit
 1583 // from the register is too close to the start of the live range.
1584 SpillBetween(current, current->Start(), pos->pos());
1585 if (!AllocationOk()) return;
1586 DCHECK(UnhandledIsSorted());
1587 continue;
1588 }
1589 }
1590
1591 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1592 LiveRange* cur_active = active_live_ranges_.at(i);
1593 if (cur_active->End().Value() <= position.Value()) {
1594 ActiveToHandled(cur_active);
1595 --i; // The live range was removed from the list of active live ranges.
1596 } else if (!cur_active->Covers(position)) {
1597 ActiveToInactive(cur_active);
1598 --i; // The live range was removed from the list of active live ranges.
1599 }
1600 }
1601
1602 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1603 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1604 if (cur_inactive->End().Value() <= position.Value()) {
1605 InactiveToHandled(cur_inactive);
1606 --i; // Live range was removed from the list of inactive live ranges.
1607 } else if (cur_inactive->Covers(position)) {
1608 InactiveToActive(cur_inactive);
1609 --i; // Live range was removed from the list of inactive live ranges.
1610 }
1611 }
1612
1613 DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());
1614
1615 bool result = TryAllocateFreeReg(current);
1616 if (!AllocationOk()) return;
1617
1618 if (!result) AllocateBlockedReg(current);
1619 if (!AllocationOk()) return;
1620
1621 if (current->HasRegisterAssigned()) {
1622 AddToActive(current);
1623 }
1624 }
1625
1626 reusable_slots_.Rewind(0);
1627 active_live_ranges_.Rewind(0);
1628 inactive_live_ranges_.Rewind(0);
1629}
1630
1631
1632const char* RegisterAllocator::RegisterName(int allocation_index) {
1633 if (mode_ == GENERAL_REGISTERS) {
1634 return Register::AllocationIndexToString(allocation_index);
1635 } else {
1636 return DoubleRegister::AllocationIndexToString(allocation_index);
1637 }
1638}
1639
1640
1641void RegisterAllocator::TraceAlloc(const char* msg, ...) {
1642 if (FLAG_trace_alloc) {
1643 va_list arguments;
1644 va_start(arguments, msg);
1645 base::OS::VPrint(msg, arguments);
1646 va_end(arguments);
1647 }
1648}
1649
1650
1651bool RegisterAllocator::HasTaggedValue(int virtual_register) const {
1652 return code()->IsReference(virtual_register);
1653}
1654
1655
1656RegisterKind RegisterAllocator::RequiredRegisterKind(
1657 int virtual_register) const {
1658 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
1659 : GENERAL_REGISTERS;
1660}
1661
1662
1663void RegisterAllocator::AddToActive(LiveRange* range) {
1664 TraceAlloc("Add live range %d to active\n", range->id());
1665 active_live_ranges_.Add(range, zone());
1666}
1667
1668
1669void RegisterAllocator::AddToInactive(LiveRange* range) {
1670 TraceAlloc("Add live range %d to inactive\n", range->id());
1671 inactive_live_ranges_.Add(range, zone());
1672}
1673
1674
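// Insert |range| into the unhandled list, keeping the list sorted by
// decreasing start position so that the next range to process stays at the
// end.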
1675void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) {
1676 if (range == NULL || range->IsEmpty()) return;
1677 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1678 DCHECK(allocation_finger_.Value() <= range->Start().Value());
1679 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1680 LiveRange* cur_range = unhandled_live_ranges_.at(i);
1681 if (range->ShouldBeAllocatedBefore(cur_range)) {
1682 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1683 unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1684 DCHECK(UnhandledIsSorted());
1685 return;
1686 }
1687 }
1688 TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1689 unhandled_live_ranges_.InsertAt(0, range, zone());
1690 DCHECK(UnhandledIsSorted());
1691}
1692
1693
1694void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1695 if (range == NULL || range->IsEmpty()) return;
1696 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1697 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1698 unhandled_live_ranges_.Add(range, zone());
1699}
1700
1701
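// Comparator for sorting unhandled live ranges: ranges that should be
// allocated first compare greater, so they end up at the end of the list.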
1702static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1703 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
1704 !(*b)->ShouldBeAllocatedBefore(*a));
1705 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1706 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1707 return (*a)->id() - (*b)->id();
1708}
1709
1710
1711// Sort the unhandled live ranges so that the ranges to be processed first are
1712// at the end of the array list. This is convenient for the register allocation
1713// algorithm because it is efficient to remove elements from the end.
1714void RegisterAllocator::SortUnhandled() {
1715 TraceAlloc("Sort unhandled\n");
1716 unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1717}
1718
1719
1720bool RegisterAllocator::UnhandledIsSorted() {
1721 int len = unhandled_live_ranges_.length();
1722 for (int i = 1; i < len; i++) {
1723 LiveRange* a = unhandled_live_ranges_.at(i - 1);
1724 LiveRange* b = unhandled_live_ranges_.at(i);
1725 if (a->Start().Value() < b->Start().Value()) return false;
1726 }
1727 return true;
1728}
1729
1730
1731void RegisterAllocator::FreeSpillSlot(LiveRange* range) {
1732 // Check that we are the last range.
1733 if (range->next() != NULL) return;
1734
1735 if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1736
1737 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1738 if (spill_operand->IsConstant()) return;
1739 if (spill_operand->index() >= 0) {
1740 reusable_slots_.Add(range, zone());
1741 }
1742}
1743
1744
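// Reuse a previously released spill slot if the range that owned it ends no
// later than the start of |range|'s top-level range.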
1745InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) {
1746 if (reusable_slots_.is_empty()) return NULL;
1747 if (reusable_slots_.first()->End().Value() >
1748 range->TopLevel()->Start().Value()) {
1749 return NULL;
1750 }
1751 InstructionOperand* result =
1752 reusable_slots_.first()->TopLevel()->GetSpillOperand();
1753 reusable_slots_.Remove(0);
1754 return result;
1755}
1756
1757
1758void RegisterAllocator::ActiveToHandled(LiveRange* range) {
1759 DCHECK(active_live_ranges_.Contains(range));
1760 active_live_ranges_.RemoveElement(range);
1761 TraceAlloc("Moving live range %d from active to handled\n", range->id());
1762 FreeSpillSlot(range);
1763}
1764
1765
1766void RegisterAllocator::ActiveToInactive(LiveRange* range) {
1767 DCHECK(active_live_ranges_.Contains(range));
1768 active_live_ranges_.RemoveElement(range);
1769 inactive_live_ranges_.Add(range, zone());
1770 TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1771}
1772
1773
1774void RegisterAllocator::InactiveToHandled(LiveRange* range) {
1775 DCHECK(inactive_live_ranges_.Contains(range));
1776 inactive_live_ranges_.RemoveElement(range);
1777 TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1778 FreeSpillSlot(range);
1779}
1780
1781
1782void RegisterAllocator::InactiveToActive(LiveRange* range) {
1783 DCHECK(inactive_live_ranges_.Contains(range));
1784 inactive_live_ranges_.RemoveElement(range);
1785 active_live_ranges_.Add(range, zone());
1786 TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1787}
1788
1789
1790// TryAllocateFreeReg and AllocateBlockedReg assume this
1791// when allocating local arrays.
1792STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
1793 Register::kMaxNumAllocatableRegisters);
1794
1795
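// Try to find a register that is free at |current|'s start, preferring the
// hinted register when it stays free for the whole range. If the chosen
// register becomes blocked before the range ends, split |current| and allocate
// only the first part. Returns false if every register is blocked at the
// start.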
1796bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) {
1797 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1798
1799 for (int i = 0; i < num_registers_; i++) {
1800 free_until_pos[i] = LifetimePosition::MaxPosition();
1801 }
1802
1803 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1804 LiveRange* cur_active = active_live_ranges_.at(i);
1805 free_until_pos[cur_active->assigned_register()] =
1806 LifetimePosition::FromInstructionIndex(0);
1807 }
1808
1809 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1810 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1811 DCHECK(cur_inactive->End().Value() > current->Start().Value());
1812 LifetimePosition next_intersection =
1813 cur_inactive->FirstIntersection(current);
1814 if (!next_intersection.IsValid()) continue;
1815 int cur_reg = cur_inactive->assigned_register();
1816 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1817 }
1818
1819 InstructionOperand* hint = current->FirstHint();
1820 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
1821 int register_index = hint->index();
1822 TraceAlloc(
1823 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1824 RegisterName(register_index), free_until_pos[register_index].Value(),
1825 current->id(), current->End().Value());
1826
1827 // The desired register is free until the end of the current live range.
1828 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1829 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1830 RegisterName(register_index), current->id());
1831 SetLiveRangeAssignedRegister(current, register_index);
1832 return true;
1833 }
1834 }
1835
1836 // Find the register which stays free for the longest time.
1837 int reg = 0;
1838 for (int i = 1; i < RegisterCount(); ++i) {
1839 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1840 reg = i;
1841 }
1842 }
1843
1844 LifetimePosition pos = free_until_pos[reg];
1845
1846 if (pos.Value() <= current->Start().Value()) {
1847 // All registers are blocked.
1848 return false;
1849 }
1850
1851 if (pos.Value() < current->End().Value()) {
1852    // Register reg is available at the range start but becomes blocked before
1853    // the range end. Split current at the position where it becomes blocked.
1854 LiveRange* tail = SplitRangeAt(current, pos);
1855 if (!AllocationOk()) return false;
1856 AddToUnhandledSorted(tail);
1857 }
1858
1859
1860 // Register reg is available at the range start and is free until
1861 // the range end.
1862 DCHECK(pos.Value() >= current->End().Value());
1863 TraceAlloc("Assigning free reg %s to live range %d\n", RegisterName(reg),
1864 current->id());
1865 SetLiveRangeAssignedRegister(current, reg);
1866
1867 return true;
1868}
1869
1870
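// Called when no register is free at |current|'s start. If |current| has no
// upcoming use that requires a register it is spilled outright; if every
// register is taken before that use, the head of the range is spilled up to
// the use. Otherwise the register whose next use is furthest away is taken and
// the intersecting parts of the ranges holding it are split and spilled.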
1871void RegisterAllocator::AllocateBlockedReg(LiveRange* current) {
1872 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1873 if (register_use == NULL) {
1874 // There is no use in the current live range that requires a register.
1875 // We can just spill it.
1876 Spill(current);
1877 return;
1878 }
1879
1880
1881 LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1882 LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1883
1884 for (int i = 0; i < num_registers_; i++) {
1885 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1886 }
1887
1888 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1889 LiveRange* range = active_live_ranges_[i];
1890 int cur_reg = range->assigned_register();
1891 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1892 block_pos[cur_reg] = use_pos[cur_reg] =
1893 LifetimePosition::FromInstructionIndex(0);
1894 } else {
1895 UsePosition* next_use =
1896 range->NextUsePositionRegisterIsBeneficial(current->Start());
1897 if (next_use == NULL) {
1898 use_pos[cur_reg] = range->End();
1899 } else {
1900 use_pos[cur_reg] = next_use->pos();
1901 }
1902 }
1903 }
1904
1905 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1906 LiveRange* range = inactive_live_ranges_.at(i);
1907 DCHECK(range->End().Value() > current->Start().Value());
1908 LifetimePosition next_intersection = range->FirstIntersection(current);
1909 if (!next_intersection.IsValid()) continue;
1910 int cur_reg = range->assigned_register();
1911 if (range->IsFixed()) {
1912 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1913 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1914 } else {
1915 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1916 }
1917 }
1918
1919 int reg = 0;
1920 for (int i = 1; i < RegisterCount(); ++i) {
1921 if (use_pos[i].Value() > use_pos[reg].Value()) {
1922 reg = i;
1923 }
1924 }
1925
1926 LifetimePosition pos = use_pos[reg];
1927
1928 if (pos.Value() < register_use->pos().Value()) {
1929 // All registers are blocked before the first use that requires a register.
1930    // Spill the starting part of the live range up to that use.
1931 SpillBetween(current, current->Start(), register_use->pos());
1932 return;
1933 }
1934
1935 if (block_pos[reg].Value() < current->End().Value()) {
1936 // Register becomes blocked before the current range end. Split before that
1937 // position.
1938 LiveRange* tail = SplitBetween(current, current->Start(),
1939 block_pos[reg].InstructionStart());
1940 if (!AllocationOk()) return;
1941 AddToUnhandledSorted(tail);
1942 }
1943
1944 // Register reg is not blocked for the whole range.
1945 DCHECK(block_pos[reg].Value() >= current->End().Value());
1946 TraceAlloc("Assigning blocked reg %s to live range %d\n", RegisterName(reg),
1947 current->id());
1948 SetLiveRangeAssignedRegister(current, reg);
1949
1950  // This register was not free. Thus we need to find and spill
1951  // parts of active and inactive live ranges that use the same register
1952  // at the same lifetime positions as current.
1953 SplitAndSpillIntersecting(current);
1954}
1955
1956
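// If |pos| lies inside a loop and |range| covers the loop header, try to move
// the spill position up to the header of the outermost such loop, so that the
// spill move is not repeated on every back edge.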
1957LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
1958 LiveRange* range, LifetimePosition pos) {
1959 BasicBlock* block = GetBlock(pos.InstructionStart());
1960 BasicBlock* loop_header =
1961 block->IsLoopHeader() ? block : code()->GetContainingLoop(block);
1962
1963 if (loop_header == NULL) return pos;
1964
1965 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
1966
1967 while (loop_header != NULL) {
1968    // We are going to spill the live range inside the loop. If possible, try
1969    // to move the spilling position backwards to the loop header; this will
1970    // reduce the number of memory moves on the back edge.
1971 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1972 loop_header->first_instruction_index());
1973
1974 if (range->Covers(loop_start)) {
1975 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1976        // No register-beneficial use inside the loop before the position.
1977 pos = loop_start;
1978 }
1979 }
1980
1981 // Try hoisting out to an outer loop.
1982 loop_header = code()->GetContainingLoop(loop_header);
1983 }
1984
1985 return pos;
1986}
1987
1988
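// |current| has just been assigned a register that other active or inactive
// ranges still hold. Split and spill the conflicting parts of those ranges
// around |current|'s start and retire them from their lists.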
1989void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1990 DCHECK(current->HasRegisterAssigned());
1991 int reg = current->assigned_register();
1992 LifetimePosition split_pos = current->Start();
1993 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1994 LiveRange* range = active_live_ranges_[i];
1995 if (range->assigned_register() == reg) {
1996 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1997 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
1998 if (next_pos == NULL) {
1999 SpillAfter(range, spill_pos);
2000 } else {
2001        // When spilling between spill_pos and next_pos, ensure that the range
2002        // remains spilled at least until the start of the current live range.
2003        // This guarantees that we will not introduce new unhandled ranges that
2004        // start before the current range, as this would violate the allocation
2005        // invariant and lead to an inconsistent state of active and inactive
2006        // live ranges: ranges are allocated in order of their start positions,
2007        // and ranges are retired from active/inactive when the start of the
2008        // current live range is larger than their end.
2009 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2010 }
2011 if (!AllocationOk()) return;
2012 ActiveToHandled(range);
2013 --i;
2014 }
2015 }
2016
2017 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
2018 LiveRange* range = inactive_live_ranges_[i];
2019 DCHECK(range->End().Value() > current->Start().Value());
2020 if (range->assigned_register() == reg && !range->IsFixed()) {
2021 LifetimePosition next_intersection = range->FirstIntersection(current);
2022 if (next_intersection.IsValid()) {
2023 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2024 if (next_pos == NULL) {
2025 SpillAfter(range, split_pos);
2026 } else {
2027 next_intersection = Min(next_intersection, next_pos->pos());
2028 SpillBetween(range, split_pos, next_intersection);
2029 }
2030 if (!AllocationOk()) return;
2031 InactiveToHandled(range);
2032 --i;
2033 }
2034 }
2035 }
2036}
2037
2038
2039bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) {
2040 return pos.IsInstructionStart() &&
2041 InstructionAt(pos.InstructionIndex())->IsBlockStart();
2042}
2043
2044
2045LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2046 LifetimePosition pos) {
2047 DCHECK(!range->IsFixed());
2048 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2049
2050 if (pos.Value() <= range->Start().Value()) return range;
2051
2052  // We can't properly connect live ranges if the split occurred at the end
2053  // of a control instruction.
2054 DCHECK(pos.IsInstructionStart() ||
2055 !InstructionAt(pos.InstructionIndex())->IsControl());
2056
2057 int vreg = GetVirtualRegister();
2058 if (!AllocationOk()) return NULL;
2059 LiveRange* result = LiveRangeFor(vreg);
2060 range->SplitAt(pos, result, zone());
2061 return result;
2062}
2063
2064
2065LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2066 LifetimePosition start,
2067 LifetimePosition end) {
2068 DCHECK(!range->IsFixed());
2069 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2070 range->id(), start.Value(), end.Value());
2071
2072 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2073 DCHECK(split_pos.Value() >= start.Value());
2074 return SplitRangeAt(range, split_pos);
2075}
2076
2077
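// Pick a split position in [start, end]: split as late as possible, but hoist
// the split to the header of the outermost loop entered between the two
// positions so that the resulting spill code stays outside the loop body.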
2078LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2079 LifetimePosition end) {
2080 int start_instr = start.InstructionIndex();
2081 int end_instr = end.InstructionIndex();
2082 DCHECK(start_instr <= end_instr);
2083
2084  // We have no choice: start and end fall within the same instruction.
2085 if (start_instr == end_instr) return end;
2086
2087 BasicBlock* start_block = GetBlock(start);
2088 BasicBlock* end_block = GetBlock(end);
2089
2090 if (end_block == start_block) {
2091 // The interval is split in the same basic block. Split at the latest
2092 // possible position.
2093 return end;
2094 }
2095
2096 BasicBlock* block = end_block;
2097 // Find header of outermost loop.
2098 // TODO(titzer): fix redundancy below.
2099 while (code()->GetContainingLoop(block) != NULL &&
2100 code()->GetContainingLoop(block)->rpo_number_ >
2101 start_block->rpo_number_) {
2102 block = code()->GetContainingLoop(block);
2103 }
2104
2105 // We did not find any suitable outer loop. Split at the latest possible
2106 // position unless end_block is a loop header itself.
2107 if (block == end_block && !end_block->IsLoopHeader()) return end;
2108
2109 return LifetimePosition::FromInstructionIndex(
2110 block->first_instruction_index());
2111}
2112
2113
2114void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2115 LiveRange* second_part = SplitRangeAt(range, pos);
2116 if (!AllocationOk()) return;
2117 Spill(second_part);
2118}
2119
2120
2121void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2122 LifetimePosition end) {
2123 SpillBetweenUntil(range, start, start, end);
2124}
2125
2126
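// Split |range| at |start|. If the second part still reaches into [start, end[,
// split it again at a position no earlier than |until|, spill only the middle
// piece and reschedule the tail; otherwise reschedule the whole second part.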
2127void RegisterAllocator::SpillBetweenUntil(LiveRange* range,
2128 LifetimePosition start,
2129 LifetimePosition until,
2130 LifetimePosition end) {
2131 CHECK(start.Value() < end.Value());
2132 LiveRange* second_part = SplitRangeAt(range, start);
2133 if (!AllocationOk()) return;
2134
2135 if (second_part->Start().Value() < end.Value()) {
2136    // The split result intersects with [start, end[.
2137    // Split it at a position in ]start+1, end[, spill the middle part,
2138    // and add the rest to the unhandled list.
2139 LiveRange* third_part = SplitBetween(
2140 second_part, Max(second_part->Start().InstructionEnd(), until),
2141 end.PrevInstruction().InstructionEnd());
2142 if (!AllocationOk()) return;
2143
2144 DCHECK(third_part != second_part);
2145
2146 Spill(second_part);
2147 AddToUnhandledSorted(third_part);
2148 } else {
2149    // The split result does not intersect with [start, end[.
2150    // Nothing to spill. Just add it to the unhandled list as a whole.
2151 AddToUnhandledSorted(second_part);
2152 }
2153}
2154
2155
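// Spill |range|: give its top-level range a spill operand, reusing a released
// spill slot when possible and otherwise allocating a fresh stack slot of the
// matching kind on the frame, then mark the range as spilled.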
2156void RegisterAllocator::Spill(LiveRange* range) {
2157 DCHECK(!range->IsSpilled());
2158 TraceAlloc("Spilling live range %d\n", range->id());
2159 LiveRange* first = range->TopLevel();
2160
2161 if (!first->HasAllocatedSpillOperand()) {
2162 InstructionOperand* op = TryReuseSpillSlot(range);
2163 if (op == NULL) {
2164 // Allocate a new operand referring to the spill slot.
2165 RegisterKind kind = range->Kind();
2166 int index = code()->frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
2167 if (kind == DOUBLE_REGISTERS) {
2168 op = DoubleStackSlotOperand::Create(index, zone());
2169 } else {
2170 DCHECK(kind == GENERAL_REGISTERS);
2171 op = StackSlotOperand::Create(index, zone());
2172 }
2173 }
2174 first->SetSpillOperand(op);
2175 }
2176 range->MakeSpilled(code_zone());
2177}
2178
2179
2180int RegisterAllocator::RegisterCount() const { return num_registers_; }
2181
2182
2183#ifdef DEBUG
2184
2185
2186void RegisterAllocator::Verify() const {
2187 for (int i = 0; i < live_ranges()->length(); ++i) {
2188 LiveRange* current = live_ranges()->at(i);
2189 if (current != NULL) current->Verify();
2190 }
2191}
2192
2193
2194#endif
2195
2196
2197void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2198 int reg) {
2199 if (range->Kind() == DOUBLE_REGISTERS) {
2200 assigned_double_registers_->Add(reg);
2201 } else {
2202 DCHECK(range->Kind() == GENERAL_REGISTERS);
2203 assigned_registers_->Add(reg);
2204 }
2205 range->set_assigned_register(reg, code_zone());
2206}
2207
2208
2209RegisterAllocatorPhase::RegisterAllocatorPhase(const char* name,
2210 RegisterAllocator* allocator)
2211 : CompilationPhase(name, allocator->code()->linkage()->info()),
2212 allocator_(allocator) {
2213 if (FLAG_turbo_stats) {
2214 allocator_zone_start_allocation_size_ =
2215 allocator->zone()->allocation_size();
2216 }
2217}
2218
2219
2220RegisterAllocatorPhase::~RegisterAllocatorPhase() {
2221 if (FLAG_turbo_stats) {
2222 unsigned size = allocator_->zone()->allocation_size() -
2223 allocator_zone_start_allocation_size_;
2224 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2225 }
2226#ifdef DEBUG
2227 if (allocator_ != NULL) allocator_->Verify();
2228#endif
2229}
2230}
2231}
2232} // namespace v8::internal::compiler