// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5#include "src/crankshaft/lithium-allocator.h"
6
7#include "src/crankshaft/hydrogen.h"
8#include "src/crankshaft/lithium-inl.h"
9#include "src/crankshaft/lithium-allocator-inl.h"
10#include "src/register-configuration.h"
11#include "src/string-stream.h"
12
13namespace v8 {
14namespace internal {
15
16static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
17 return a.Value() < b.Value() ? a : b;
18}
19
20
21static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
22 return a.Value() > b.Value() ? a : b;
23}
24
25
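// A UsePosition records a single use of a value inside a live range. The
// constructor derives the register requirements from the operand's policy:
// the use requires a register if the operand carries a (double) register
// policy, and a register is considered beneficial unless the operand has
// the ANY policy.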
26UsePosition::UsePosition(LifetimePosition pos,
27 LOperand* operand,
28 LOperand* hint)
29 : operand_(operand),
30 hint_(hint),
31 pos_(pos),
32 next_(NULL),
33 requires_reg_(false),
34 register_beneficial_(true) {
35 if (operand_ != NULL && operand_->IsUnallocated()) {
36 LUnallocated* unalloc = LUnallocated::cast(operand_);
37 requires_reg_ = unalloc->HasRegisterPolicy() ||
38 unalloc->HasDoubleRegisterPolicy();
39 register_beneficial_ = !unalloc->HasAnyPolicy();
40 }
41 DCHECK(pos_.IsValid());
42}
43
44
45bool UsePosition::HasHint() const {
46 return hint_ != NULL && !hint_->IsUnallocated();
47}
48
49
50bool UsePosition::RequiresRegister() const {
51 return requires_reg_;
52}
53
54
55bool UsePosition::RegisterIsBeneficial() const {
56 return register_beneficial_;
57}
58
59
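// Splits this interval [start, end) at |pos|: this interval is shrunk to
// [start, pos) and a new interval [pos, end) is linked in directly after it.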
60void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
61 DCHECK(Contains(pos) && pos.Value() != start().Value());
62 UseInterval* after = new(zone) UseInterval(pos, end_);
63 after->next_ = next_;
64 next_ = after;
65 end_ = pos;
66}
67
68
69#ifdef DEBUG
70
71
72void LiveRange::Verify() const {
73 UsePosition* cur = first_pos_;
74 while (cur != NULL) {
75 DCHECK(Start().Value() <= cur->pos().Value() &&
76 cur->pos().Value() <= End().Value());
77 cur = cur->next();
78 }
79}
80
81
82bool LiveRange::HasOverlap(UseInterval* target) const {
83 UseInterval* current_interval = first_interval_;
84 while (current_interval != NULL) {
85 // Intervals overlap if the start of one is contained in the other.
86 if (current_interval->Contains(target->start()) ||
87 target->Contains(current_interval->start())) {
88 return true;
89 }
90 current_interval = current_interval->next();
91 }
92 return false;
93}
94
95
96#endif
97
98
99LiveRange::LiveRange(int id, Zone* zone)
100 : id_(id),
101 spilled_(false),
102 kind_(UNALLOCATED_REGISTERS),
103 assigned_register_(kInvalidAssignment),
104 last_interval_(NULL),
105 first_interval_(NULL),
106 first_pos_(NULL),
107 parent_(NULL),
108 next_(NULL),
109 current_interval_(NULL),
110 last_processed_use_(NULL),
111 current_hint_operand_(NULL),
112 spill_operand_(new (zone) LOperand()),
113 spill_start_index_(kMaxInt) {}
114
115
116void LiveRange::set_assigned_register(int reg, Zone* zone) {
117 DCHECK(!HasRegisterAssigned() && !IsSpilled());
118 assigned_register_ = reg;
119 ConvertOperands(zone);
120}
121
122
123void LiveRange::MakeSpilled(Zone* zone) {
124 DCHECK(!IsSpilled());
125 DCHECK(TopLevel()->HasAllocatedSpillOperand());
126 spilled_ = true;
127 assigned_register_ = kInvalidAssignment;
128 ConvertOperands(zone);
129}
130
131
132bool LiveRange::HasAllocatedSpillOperand() const {
133 DCHECK(spill_operand_ != NULL);
134 return !spill_operand_->IsIgnored();
135}
136
137
138void LiveRange::SetSpillOperand(LOperand* operand) {
139 DCHECK(!operand->IsUnallocated());
140 DCHECK(spill_operand_ != NULL);
141 DCHECK(spill_operand_->IsIgnored());
142 spill_operand_->ConvertTo(operand->kind(), operand->index());
143}
144
145
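// Returns the first use position at or after |start|. The search resumes
// from the cached last_processed_use_, so callers are expected to query
// with non-decreasing positions.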
146UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
147 UsePosition* use_pos = last_processed_use_;
148 if (use_pos == NULL) use_pos = first_pos();
149 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
150 use_pos = use_pos->next();
151 }
152 last_processed_use_ = use_pos;
153 return use_pos;
154}
155
156
157UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
158 LifetimePosition start) {
159 UsePosition* pos = NextUsePosition(start);
160 while (pos != NULL && !pos->RegisterIsBeneficial()) {
161 pos = pos->next();
162 }
163 return pos;
164}
165
166
167UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
168 LifetimePosition start) {
169 UsePosition* pos = first_pos();
170 UsePosition* prev = NULL;
171 while (pos != NULL && pos->pos().Value() < start.Value()) {
172 if (pos->RegisterIsBeneficial()) prev = pos;
173 pos = pos->next();
174 }
175 return prev;
176}
177
178
179UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
180 UsePosition* pos = NextUsePosition(start);
181 while (pos != NULL && !pos->RequiresRegister()) {
182 pos = pos->next();
183 }
184 return pos;
185}
186
187
188bool LiveRange::CanBeSpilled(LifetimePosition pos) {
189 // We cannot spill a live range that has a use requiring a register
190 // at the current or the immediate next position.
191 UsePosition* use_pos = NextRegisterPosition(pos);
192 if (use_pos == NULL) return true;
193 return
194 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
195}
196
197
198LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
199 LOperand* op = NULL;
200 if (HasRegisterAssigned()) {
201 DCHECK(!IsSpilled());
202 switch (Kind()) {
203 case GENERAL_REGISTERS:
204 op = LRegister::Create(assigned_register(), zone);
205 break;
206 case DOUBLE_REGISTERS:
207 op = LDoubleRegister::Create(assigned_register(), zone);
208 break;
209 default:
210 UNREACHABLE();
211 }
212 } else if (IsSpilled()) {
213 DCHECK(!HasRegisterAssigned());
214 op = TopLevel()->GetSpillOperand();
215 DCHECK(!op->IsUnallocated());
216 } else {
217 LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
218 unalloc->set_virtual_register(id_);
219 op = unalloc;
220 }
221 return op;
222}
223
224
225UseInterval* LiveRange::FirstSearchIntervalForPosition(
226 LifetimePosition position) const {
227 if (current_interval_ == NULL) return first_interval_;
228 if (current_interval_->start().Value() > position.Value()) {
229 current_interval_ = NULL;
230 return first_interval_;
231 }
232 return current_interval_;
233}
234
235
236void LiveRange::AdvanceLastProcessedMarker(
237 UseInterval* to_start_of, LifetimePosition but_not_past) const {
238 if (to_start_of == NULL) return;
239 if (to_start_of->start().Value() > but_not_past.Value()) return;
240 LifetimePosition start =
241 current_interval_ == NULL ? LifetimePosition::Invalid()
242 : current_interval_->start();
243 if (to_start_of->start().Value() > start.Value()) {
244 current_interval_ = to_start_of;
245 }
246}
247
248
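// Splits this live range at |position|: use intervals and use positions at
// or after the split are moved into |result|, which is linked in as the
// next child of the same top-level (parent) range.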
249void LiveRange::SplitAt(LifetimePosition position,
250 LiveRange* result,
251 Zone* zone) {
252 DCHECK(Start().Value() < position.Value());
253 DCHECK(result->IsEmpty());
254 // Find the last interval that ends before the position. If the
255 // position is contained in one of the intervals in the chain, we
256 // split that interval and use the first part.
257 UseInterval* current = FirstSearchIntervalForPosition(position);
258
  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
261 bool split_at_start = false;
262
263 if (current->start().Value() == position.Value()) {
264 // When splitting at start we need to locate the previous use interval.
265 current = first_interval_;
266 }
267
268 while (current != NULL) {
269 if (current->Contains(position)) {
270 current->SplitAt(position, zone);
271 break;
272 }
273 UseInterval* next = current->next();
274 if (next->start().Value() >= position.Value()) {
275 split_at_start = (next->start().Value() == position.Value());
276 break;
277 }
278 current = next;
279 }
280
281 // Partition original use intervals to the two live ranges.
282 UseInterval* before = current;
283 UseInterval* after = before->next();
284 result->last_interval_ = (last_interval_ == before)
285 ? after // Only interval in the range after split.
286 : last_interval_; // Last interval of the original range.
287 result->first_interval_ = after;
288 last_interval_ = before;
289
290 // Find the last use position before the split and the first use
291 // position after it.
292 UsePosition* use_after = first_pos_;
293 UsePosition* use_before = NULL;
294 if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child because the split child owns the use interval
    // covering it.
298 while (use_after != NULL && use_after->pos().Value() < position.Value()) {
299 use_before = use_after;
300 use_after = use_after->next();
301 }
302 } else {
303 while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
304 use_before = use_after;
305 use_after = use_after->next();
306 }
307 }
308
309 // Partition original use positions to the two live ranges.
310 if (use_before != NULL) {
311 use_before->next_ = NULL;
312 } else {
313 first_pos_ = NULL;
314 }
315 result->first_pos_ = use_after;
316
317 // Discard cached iteration state. It might be pointing
318 // to the use that no longer belongs to this live range.
319 last_processed_use_ = NULL;
320 current_interval_ = NULL;
321
322 // Link the new live range in the chain before any of the other
323 // ranges linked from the range before the split.
324 result->parent_ = (parent_ == NULL) ? this : parent_;
325 result->kind_ = result->parent_->kind_;
326 result->next_ = next_;
327 next_ = result;
328
329#ifdef DEBUG
330 Verify();
331 result->Verify();
332#endif
333}
334
335
336// This implements an ordering on live ranges so that they are ordered by their
337// start positions. This is needed for the correctness of the register
338// allocation algorithm. If two live ranges start at the same offset then there
339// is a tie breaker based on where the value is first used. This part of the
340// ordering is merely a heuristic.
341bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
342 LifetimePosition start = Start();
343 LifetimePosition other_start = other->Start();
344 if (start.Value() == other_start.Value()) {
345 UsePosition* pos = first_pos();
346 if (pos == NULL) return false;
347 UsePosition* other_pos = other->first_pos();
348 if (other_pos == NULL) return true;
349 return pos->pos().Value() < other_pos->pos().Value();
350 }
351 return start.Value() < other_start.Value();
352}
353
354
355void LiveRange::ShortenTo(LifetimePosition start) {
356 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
357 DCHECK(first_interval_ != NULL);
358 DCHECK(first_interval_->start().Value() <= start.Value());
359 DCHECK(start.Value() < first_interval_->end().Value());
360 first_interval_->set_start(start);
361}
362
363
364void LiveRange::EnsureInterval(LifetimePosition start,
365 LifetimePosition end,
366 Zone* zone) {
367 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
368 id_,
369 start.Value(),
370 end.Value());
371 LifetimePosition new_end = end;
372 while (first_interval_ != NULL &&
373 first_interval_->start().Value() <= end.Value()) {
374 if (first_interval_->end().Value() > end.Value()) {
375 new_end = first_interval_->end();
376 }
377 first_interval_ = first_interval_->next();
378 }
379
380 UseInterval* new_interval = new(zone) UseInterval(start, new_end);
381 new_interval->next_ = first_interval_;
382 first_interval_ = new_interval;
383 if (new_interval->next() == NULL) {
384 last_interval_ = new_interval;
385 }
386}
387
388
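// Prepends the interval [start, end) to this range. Intervals are added
// while walking the instructions backwards, so a new interval is either
// linked in front of the current first interval or merged with it when
// they touch or overlap.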
389void LiveRange::AddUseInterval(LifetimePosition start,
390 LifetimePosition end,
391 Zone* zone) {
392 LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
393 id_,
394 start.Value(),
395 end.Value());
396 if (first_interval_ == NULL) {
397 UseInterval* interval = new(zone) UseInterval(start, end);
398 first_interval_ = interval;
399 last_interval_ = interval;
400 } else {
401 if (end.Value() == first_interval_->start().Value()) {
402 first_interval_->set_start(start);
403 } else if (end.Value() < first_interval_->start().Value()) {
404 UseInterval* interval = new(zone) UseInterval(start, end);
405 interval->set_next(first_interval_);
406 first_interval_ = interval;
407 } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
411 DCHECK(start.Value() < first_interval_->end().Value());
412 first_interval_->start_ = Min(start, first_interval_->start_);
413 first_interval_->end_ = Max(end, first_interval_->end_);
414 }
415 }
416}
417
418
419void LiveRange::AddUsePosition(LifetimePosition pos,
420 LOperand* operand,
421 LOperand* hint,
422 Zone* zone) {
423 LAllocator::TraceAlloc("Add to live range %d use position %d\n",
424 id_,
425 pos.Value());
426 UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
427 UsePosition* prev_hint = NULL;
428 UsePosition* prev = NULL;
429 UsePosition* current = first_pos_;
430 while (current != NULL && current->pos().Value() < pos.Value()) {
431 prev_hint = current->HasHint() ? current : prev_hint;
432 prev = current;
433 current = current->next();
434 }
435
436 if (prev == NULL) {
437 use_pos->set_next(first_pos_);
438 first_pos_ = use_pos;
439 } else {
440 use_pos->next_ = prev->next_;
441 prev->next_ = use_pos;
442 }
443
444 if (prev_hint == NULL && use_pos->HasHint()) {
445 current_hint_operand_ = hint;
446 }
447}
448
449
450void LiveRange::ConvertOperands(Zone* zone) {
451 LOperand* op = CreateAssignedOperand(zone);
452 UsePosition* use_pos = first_pos();
453 while (use_pos != NULL) {
454 DCHECK(Start().Value() <= use_pos->pos().Value() &&
455 use_pos->pos().Value() <= End().Value());
456
457 if (use_pos->HasOperand()) {
458 DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
459 !use_pos->RequiresRegister());
460 use_pos->operand()->ConvertTo(op->kind(), op->index());
461 }
462 use_pos = use_pos->next();
463 }
464}
465
466
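// CanCover() only checks the [Start(), End()) bounds of the range;
// Covers() below additionally checks that |position| does not fall into a
// lifetime hole between use intervals.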
467bool LiveRange::CanCover(LifetimePosition position) const {
468 if (IsEmpty()) return false;
469 return Start().Value() <= position.Value() &&
470 position.Value() < End().Value();
471}
472
473
474bool LiveRange::Covers(LifetimePosition position) {
475 if (!CanCover(position)) return false;
476 UseInterval* start_search = FirstSearchIntervalForPosition(position);
477 for (UseInterval* interval = start_search;
478 interval != NULL;
479 interval = interval->next()) {
480 DCHECK(interval->next() == NULL ||
481 interval->next()->start().Value() >= interval->start().Value());
482 AdvanceLastProcessedMarker(interval, position);
483 if (interval->Contains(position)) return true;
484 if (interval->start().Value() > position.Value()) return false;
485 }
486 return false;
487}
488
489
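// Returns the first position at which this range and |other| are live at
// the same time, or an invalid position if their use intervals never
// intersect.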
490LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
491 UseInterval* b = other->first_interval();
492 if (b == NULL) return LifetimePosition::Invalid();
493 LifetimePosition advance_last_processed_up_to = b->start();
494 UseInterval* a = FirstSearchIntervalForPosition(b->start());
495 while (a != NULL && b != NULL) {
496 if (a->start().Value() > other->End().Value()) break;
497 if (b->start().Value() > End().Value()) break;
498 LifetimePosition cur_intersection = a->Intersect(b);
499 if (cur_intersection.IsValid()) {
500 return cur_intersection;
501 }
502 if (a->start().Value() < b->start().Value()) {
503 a = a->next();
504 if (a == NULL || a->start().Value() > other->End().Value()) break;
505 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
506 } else {
507 b = b->next();
508 }
509 }
510 return LifetimePosition::Invalid();
511}
512
LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()->allocator()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}
531
void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}
538
539
540BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, excluding values that flow in
  // only over backward successor edges.
543 BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
544
545 // Process all successor blocks.
546 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
547 // Add values live on entry to the successor. Note the successor's
548 // live_in will not be computed yet for backwards edges.
549 HBasicBlock* successor = it.Current();
550 BitVector* live_in = live_in_sets_[successor->block_id()];
551 if (live_in != NULL) live_out->Union(*live_in);
552
553 // All phi input operands corresponding to this successor edge are live
554 // out from this block.
555 int index = successor->PredecessorIndexOf(block);
556 const ZoneList<HPhi*>* phis = successor->phis();
557 for (int i = 0; i < phis->length(); ++i) {
558 HPhi* phi = phis->at(i);
559 if (!phi->OperandAt(index)->IsConstant()) {
560 live_out->Add(phi->OperandAt(index)->id());
561 }
562 }
563 }
564
565 return live_out;
566}
567
568
569void LAllocator::AddInitialIntervals(HBasicBlock* block,
570 BitVector* live_out) {
571 // Add an interval that includes the entire block to the live range for
572 // each live_out value.
573 LifetimePosition start = LifetimePosition::FromInstructionIndex(
574 block->first_instruction_index());
575 LifetimePosition end = LifetimePosition::FromInstructionIndex(
576 block->last_instruction_index()).NextInstruction();
577 BitVector::Iterator iterator(live_out);
578 while (!iterator.Done()) {
579 int operand_index = iterator.Current();
580 LiveRange* range = LiveRangeFor(operand_index);
581 range->AddUseInterval(start, end, zone());
582 iterator.Advance();
583 }
584}
585
586
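// Ids of fixed double-register live ranges are negative and offset by
// Register::kNumRegisters so that they never collide with the ids used for
// fixed general-register live ranges.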
587int LAllocator::FixedDoubleLiveRangeID(int index) {
588 return -index - 1 - Register::kNumRegisters;
589}
590
591
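// Pins |operand| to the concrete location demanded by its fixed policy
// (stack slot, general register or double register). For tagged values the
// operand is also recorded in the pointer map of the instruction at |pos|,
// if that instruction has one, so that the GC can find it.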
592LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
593 int pos,
594 bool is_tagged) {
595 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
596 DCHECK(operand->HasFixedPolicy());
597 if (operand->HasFixedSlotPolicy()) {
598 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
599 } else if (operand->HasFixedRegisterPolicy()) {
600 int reg_index = operand->fixed_register_index();
601 operand->ConvertTo(LOperand::REGISTER, reg_index);
602 } else if (operand->HasFixedDoubleRegisterPolicy()) {
603 int reg_index = operand->fixed_register_index();
604 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
605 } else {
606 UNREACHABLE();
607 }
608 if (is_tagged) {
609 TraceAlloc("Fixed reg is tagged at %d\n", pos);
610 LInstruction* instr = InstructionAt(pos);
611 if (instr->HasPointerMap()) {
612 instr->pointer_map()->RecordPointer(operand, chunk()->zone());
613 }
614 }
615 return operand;
616}
617
618
619LiveRange* LAllocator::FixedLiveRangeFor(int index) {
620 DCHECK(index < Register::kNumRegisters);
621 LiveRange* result = fixed_live_ranges_[index];
622 if (result == NULL) {
623 result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
624 DCHECK(result->IsFixed());
625 result->kind_ = GENERAL_REGISTERS;
626 SetLiveRangeAssignedRegister(result, index);
627 fixed_live_ranges_[index] = result;
628 }
629 return result;
630}
631
632
633LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
634 DCHECK(index < DoubleRegister::kMaxNumRegisters);
635 LiveRange* result = fixed_double_live_ranges_[index];
636 if (result == NULL) {
637 result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
638 chunk()->zone());
639 DCHECK(result->IsFixed());
640 result->kind_ = DOUBLE_REGISTERS;
641 SetLiveRangeAssignedRegister(result, index);
642 fixed_double_live_ranges_[index] = result;
643 }
644 return result;
645}
646
647
648LiveRange* LAllocator::LiveRangeFor(int index) {
649 if (index >= live_ranges_.length()) {
650 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
651 }
652 LiveRange* result = live_ranges_[index];
653 if (result == NULL) {
654 result = new(zone()) LiveRange(index, chunk()->zone());
655 live_ranges_[index] = result;
656 }
657 return result;
658}
659
660
661LGap* LAllocator::GetLastGap(HBasicBlock* block) {
662 int last_instruction = block->last_instruction_index();
663 int index = chunk_->NearestGapPos(last_instruction);
664 return GapAt(index);
665}
666
667
668HPhi* LAllocator::LookupPhi(LOperand* operand) const {
669 if (!operand->IsUnallocated()) return NULL;
670 int index = LUnallocated::cast(operand)->virtual_register();
671 HValue* instr = graph_->LookupValue(index);
672 if (instr != NULL && instr->IsPhi()) {
673 return HPhi::cast(instr);
674 }
675 return NULL;
676}
677
678
679LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
680 if (operand->IsUnallocated()) {
681 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
682 } else if (operand->IsRegister()) {
683 return FixedLiveRangeFor(operand->index());
684 } else if (operand->IsDoubleRegister()) {
685 return FixedDoubleLiveRangeFor(operand->index());
686 } else {
687 return NULL;
688 }
689}
690
691
692void LAllocator::Define(LifetimePosition position,
693 LOperand* operand,
694 LOperand* hint) {
695 LiveRange* range = LiveRangeFor(operand);
696 if (range == NULL) return;
697
698 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
699 // Can happen if there is a definition without use.
700 range->AddUseInterval(position, position.NextInstruction(), zone());
701 range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
702 } else {
703 range->ShortenTo(position);
704 }
705
706 if (operand->IsUnallocated()) {
707 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
708 range->AddUsePosition(position, unalloc_operand, hint, zone());
709 }
710}
711
712
713void LAllocator::Use(LifetimePosition block_start,
714 LifetimePosition position,
715 LOperand* operand,
716 LOperand* hint) {
717 LiveRange* range = LiveRangeFor(operand);
718 if (range == NULL) return;
719 if (operand->IsUnallocated()) {
720 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
721 range->AddUsePosition(position, unalloc_operand, hint, zone());
722 }
723 range->AddUseInterval(block_start, position, zone());
724}
725
726
727void LAllocator::AddConstraintsGapMove(int index,
728 LOperand* from,
729 LOperand* to) {
730 LGap* gap = GapAt(index);
731 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
732 chunk()->zone());
733 if (from->IsUnallocated()) {
734 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
735 for (int i = 0; i < move_operands->length(); ++i) {
736 LMoveOperands cur = move_operands->at(i);
737 LOperand* cur_to = cur.destination();
738 if (cur_to->IsUnallocated()) {
739 if (LUnallocated::cast(cur_to)->virtual_register() ==
740 LUnallocated::cast(from)->virtual_register()) {
741 move->AddMove(cur.source(), to, chunk()->zone());
742 return;
743 }
744 }
745 }
746 }
747 move->AddMove(from, to, chunk()->zone());
748}
749
750
751void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
752 int start = block->first_instruction_index();
753 int end = block->last_instruction_index();
754 if (start == -1) return;
755 for (int i = start; i <= end; ++i) {
756 if (IsGapAt(i)) {
757 LInstruction* instr = NULL;
758 LInstruction* prev_instr = NULL;
759 if (i < end) instr = InstructionAt(i + 1);
760 if (i > start) prev_instr = InstructionAt(i - 1);
761 MeetConstraintsBetween(prev_instr, instr, i);
762 if (!AllocationOk()) return;
763 }
764 }
765}
766
767
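// Meets the register constraints around the gap at |gap_index|: fixed
// temporaries and a fixed output of |first|, fixed and writable-register
// inputs of |second|, and "output same as input" constraints of |second|
// are all resolved by allocating fixed locations and inserting gap moves.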
768void LAllocator::MeetConstraintsBetween(LInstruction* first,
769 LInstruction* second,
770 int gap_index) {
771 // Handle fixed temporaries.
772 if (first != NULL) {
773 for (TempIterator it(first); !it.Done(); it.Advance()) {
774 LUnallocated* temp = LUnallocated::cast(it.Current());
775 if (temp->HasFixedPolicy()) {
776 AllocateFixed(temp, gap_index - 1, false);
777 }
778 }
779 }
780
781 // Handle fixed output operand.
782 if (first != NULL && first->Output() != NULL) {
783 LUnallocated* first_output = LUnallocated::cast(first->Output());
784 LiveRange* range = LiveRangeFor(first_output->virtual_register());
785 bool assigned = false;
786 if (first_output->HasFixedPolicy()) {
787 LUnallocated* output_copy = first_output->CopyUnconstrained(
788 chunk()->zone());
789 bool is_tagged = HasTaggedValue(first_output->virtual_register());
790 AllocateFixed(first_output, gap_index, is_tagged);
791
792 // This value is produced on the stack, we never need to spill it.
793 if (first_output->IsStackSlot()) {
794 range->SetSpillOperand(first_output);
795 range->SetSpillStartIndex(gap_index - 1);
796 assigned = true;
797 }
798 chunk_->AddGapMove(gap_index, first_output, output_copy);
799 }
800
801 if (!assigned) {
802 range->SetSpillStartIndex(gap_index);
803
      // This move to the spill operand is not a real use: liveness analysis
      // and splitting of live ranges do not account for it. Thus it should
      // be inserted at a lifetime position corresponding to the instruction
      // end.
808 LGap* gap = GapAt(gap_index);
809 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
810 chunk()->zone());
811 move->AddMove(first_output, range->GetSpillOperand(),
812 chunk()->zone());
813 }
814 }
815
816 // Handle fixed input operands of second instruction.
817 if (second != NULL) {
818 for (UseIterator it(second); !it.Done(); it.Advance()) {
819 LUnallocated* cur_input = LUnallocated::cast(it.Current());
820 if (cur_input->HasFixedPolicy()) {
821 LUnallocated* input_copy = cur_input->CopyUnconstrained(
822 chunk()->zone());
823 bool is_tagged = HasTaggedValue(cur_input->virtual_register());
824 AllocateFixed(cur_input, gap_index + 1, is_tagged);
825 AddConstraintsGapMove(gap_index, input_copy, cur_input);
826 } else if (cur_input->HasWritableRegisterPolicy()) {
827 // The live range of writable input registers always goes until the end
828 // of the instruction.
829 DCHECK(!cur_input->IsUsedAtStart());
830
831 LUnallocated* input_copy = cur_input->CopyUnconstrained(
832 chunk()->zone());
833 int vreg = GetVirtualRegister();
834 if (!AllocationOk()) return;
835 cur_input->set_virtual_register(vreg);
836
837 if (RequiredRegisterKind(input_copy->virtual_register()) ==
838 DOUBLE_REGISTERS) {
839 double_artificial_registers_.Add(
840 cur_input->virtual_register() - first_artificial_register_,
841 zone());
842 }
843
844 AddConstraintsGapMove(gap_index, input_copy, cur_input);
845 }
846 }
847 }
848
849 // Handle "output same as input" for second instruction.
850 if (second != NULL && second->Output() != NULL) {
851 LUnallocated* second_output = LUnallocated::cast(second->Output());
852 if (second_output->HasSameAsInputPolicy()) {
853 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
854 int output_vreg = second_output->virtual_register();
855 int input_vreg = cur_input->virtual_register();
856
857 LUnallocated* input_copy = cur_input->CopyUnconstrained(
858 chunk()->zone());
859 cur_input->set_virtual_register(second_output->virtual_register());
860 AddConstraintsGapMove(gap_index, input_copy, cur_input);
861
862 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
863 int index = gap_index + 1;
864 LInstruction* instr = InstructionAt(index);
865 if (instr->HasPointerMap()) {
866 instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
867 }
868 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
869 // The input is assumed to immediately have a tagged representation,
870 // before the pointer map can be used. I.e. the pointer map at the
871 // instruction will include the output operand (whose value at the
872 // beginning of the instruction is equal to the input operand). If
873 // this is not desired, then the pointer map at this instruction needs
874 // to be adjusted manually.
875 }
876 }
877 }
878}
879
880
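// Walks the instructions of |block| in reverse order, maintaining the set
// of live values: outputs define (and kill) values, inputs and temps add
// uses, and instructions that clobber (double) registers add short use
// intervals to the corresponding fixed live ranges.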
881void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
882 int block_start = block->first_instruction_index();
883 int index = block->last_instruction_index();
884
885 LifetimePosition block_start_position =
886 LifetimePosition::FromInstructionIndex(block_start);
887
888 while (index >= block_start) {
889 LifetimePosition curr_position =
890 LifetimePosition::FromInstructionIndex(index);
891
892 if (IsGapAt(index)) {
893 // We have a gap at this position.
894 LGap* gap = GapAt(index);
895 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
896 chunk()->zone());
897 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
898 for (int i = 0; i < move_operands->length(); ++i) {
899 LMoveOperands* cur = &move_operands->at(i);
900 if (cur->IsIgnored()) continue;
901 LOperand* from = cur->source();
902 LOperand* to = cur->destination();
903 HPhi* phi = LookupPhi(to);
904 LOperand* hint = to;
905 if (phi != NULL) {
906 // This is a phi resolving move.
907 if (!phi->block()->IsLoopHeader()) {
908 hint = LiveRangeFor(phi->id())->current_hint_operand();
909 }
910 } else {
911 if (to->IsUnallocated()) {
912 if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
913 Define(curr_position, to, from);
914 live->Remove(LUnallocated::cast(to)->virtual_register());
915 } else {
916 cur->Eliminate();
917 continue;
918 }
919 } else {
920 Define(curr_position, to, from);
921 }
922 }
923 Use(block_start_position, curr_position, from, hint);
924 if (from->IsUnallocated()) {
925 live->Add(LUnallocated::cast(from)->virtual_register());
926 }
927 }
928 } else {
929 DCHECK(!IsGapAt(index));
930 LInstruction* instr = InstructionAt(index);
931
932 if (instr != NULL) {
933 LOperand* output = instr->Output();
934 if (output != NULL) {
935 if (output->IsUnallocated()) {
936 live->Remove(LUnallocated::cast(output)->virtual_register());
937 }
938 Define(curr_position, output, NULL);
939 }
940
941 if (instr->ClobbersRegisters()) {
942 for (int i = 0; i < Register::kNumRegisters; ++i) {
943 if (Register::from_code(i).IsAllocatable()) {
944 if (output == NULL || !output->IsRegister() ||
945 output->index() != i) {
946 LiveRange* range = FixedLiveRangeFor(i);
947 range->AddUseInterval(curr_position,
948 curr_position.InstructionEnd(), zone());
949 }
950 }
951 }
952 }
953
954 if (instr->ClobbersDoubleRegisters(isolate())) {
955 for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
956 if (DoubleRegister::from_code(i).IsAllocatable()) {
957 if (output == NULL || !output->IsDoubleRegister() ||
958 output->index() != i) {
959 LiveRange* range = FixedDoubleLiveRangeFor(i);
960 range->AddUseInterval(curr_position,
961 curr_position.InstructionEnd(), zone());
962 }
963 }
964 }
965 }
966
967 for (UseIterator it(instr); !it.Done(); it.Advance()) {
968 LOperand* input = it.Current();
969
970 LifetimePosition use_pos;
971 if (input->IsUnallocated() &&
972 LUnallocated::cast(input)->IsUsedAtStart()) {
973 use_pos = curr_position;
974 } else {
975 use_pos = curr_position.InstructionEnd();
976 }
977
978 Use(block_start_position, use_pos, input, NULL);
979 if (input->IsUnallocated()) {
980 live->Add(LUnallocated::cast(input)->virtual_register());
981 }
982 }
983
984 for (TempIterator it(instr); !it.Done(); it.Advance()) {
985 LOperand* temp = it.Current();
986 if (instr->ClobbersTemps()) {
987 if (temp->IsRegister()) continue;
988 if (temp->IsUnallocated()) {
989 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
990 if (temp_unalloc->HasFixedPolicy()) {
991 continue;
992 }
993 }
994 }
995 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
996 Define(curr_position, temp, NULL);
997
998 if (temp->IsUnallocated()) {
999 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
1000 if (temp_unalloc->HasDoubleRegisterPolicy()) {
1001 double_artificial_registers_.Add(
1002 temp_unalloc->virtual_register() - first_artificial_register_,
1003 zone());
1004 }
1005 }
1006 }
1007 }
1008 }
1009
1010 index = index - 1;
1011 }
1012}
1013
1014
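// For every phi in |block|, inserts a gap move from the corresponding
// operand of each predecessor into the phi's virtual register and records
// the phi's spill-slot move at the block label.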
1015void LAllocator::ResolvePhis(HBasicBlock* block) {
1016 const ZoneList<HPhi*>* phis = block->phis();
1017 for (int i = 0; i < phis->length(); ++i) {
1018 HPhi* phi = phis->at(i);
1019 LUnallocated* phi_operand =
1020 new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
1021 phi_operand->set_virtual_register(phi->id());
1022 for (int j = 0; j < phi->OperandCount(); ++j) {
1023 HValue* op = phi->OperandAt(j);
1024 LOperand* operand = NULL;
1025 if (op->IsConstant() && op->EmitAtUses()) {
1026 HConstant* constant = HConstant::cast(op);
1027 operand = chunk_->DefineConstantOperand(constant);
1028 } else {
1029 DCHECK(!op->EmitAtUses());
1030 LUnallocated* unalloc =
1031 new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
1032 unalloc->set_virtual_register(op->id());
1033 operand = unalloc;
1034 }
1035 HBasicBlock* cur_block = block->predecessors()->at(j);
1036 // The gap move must be added without any special processing as in
1037 // the AddConstraintsGapMove.
1038 chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1039 operand,
1040 phi_operand);
1041
1042 // We are going to insert a move before the branch instruction.
1043 // Some branch instructions (e.g. loops' back edges)
1044 // can potentially cause a GC so they have a pointer map.
1045 // By inserting a move we essentially create a copy of a
1046 // value which is invisible to PopulatePointerMaps(), because we store
1047 // it into a location different from the operand of a live range
1048 // covering a branch instruction.
1049 // Thus we need to manually record a pointer.
1050 LInstruction* branch =
1051 InstructionAt(cur_block->last_instruction_index());
1052 if (branch->HasPointerMap()) {
1053 if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
1054 branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
1055 } else if (!phi->representation().IsDouble()) {
1056 branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
1057 }
1058 }
1059 }
1060
1061 LiveRange* live_range = LiveRangeFor(phi->id());
1062 LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1063 label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
1064 AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
1065 live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1066 }
1067}
1068
1069
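// Runs the whole allocation pipeline: register constraints, phi
// resolution, live range construction, linear-scan allocation of general
// and then double registers, pointer map population, and finally
// connecting split ranges and resolving control flow. Returns false if any
// step bailed out.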
1070bool LAllocator::Allocate(LChunk* chunk) {
1071 DCHECK(chunk_ == NULL);
1072 chunk_ = static_cast<LPlatformChunk*>(chunk);
1073 assigned_registers_ =
1074 new (chunk->zone()) BitVector(Register::kNumRegisters, chunk->zone());
1075 assigned_double_registers_ = new (chunk->zone())
1076 BitVector(DoubleRegister::kMaxNumRegisters, chunk->zone());
1077 MeetRegisterConstraints();
1078 if (!AllocationOk()) return false;
1079 ResolvePhis();
1080 BuildLiveRanges();
1081 AllocateGeneralRegisters();
1082 if (!AllocationOk()) return false;
1083 AllocateDoubleRegisters();
1084 if (!AllocationOk()) return false;
1085 PopulatePointerMaps();
1086 ConnectRanges();
1087 ResolveControlFlow();
1088 return true;
1089}
1090
1091
1092void LAllocator::MeetRegisterConstraints() {
1093 LAllocatorPhase phase("L_Register constraints", this);
1094 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1095 for (int i = 0; i < blocks->length(); ++i) {
1096 HBasicBlock* block = blocks->at(i);
1097 MeetRegisterConstraints(block);
1098 if (!AllocationOk()) return;
1099 }
1100}
1101
1102
1103void LAllocator::ResolvePhis() {
1104 LAllocatorPhase phase("L_Resolve phis", this);
1105
1106 // Process the blocks in reverse order.
1107 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1108 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1109 HBasicBlock* block = blocks->at(block_id);
1110 ResolvePhis(block);
1111 }
1112}
1113
1114
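// If |range| is live across the edge from |pred| to |block| but is
// assigned different locations at the end of |pred| and the start of
// |block|, inserts a gap move (and, if needed, a pointer map entry) to
// reconcile the two.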
1115void LAllocator::ResolveControlFlow(LiveRange* range,
1116 HBasicBlock* block,
1117 HBasicBlock* pred) {
1118 LifetimePosition pred_end =
1119 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1120 LifetimePosition cur_start =
1121 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1122 LiveRange* pred_cover = NULL;
1123 LiveRange* cur_cover = NULL;
1124 LiveRange* cur_range = range;
1125 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1126 if (cur_range->CanCover(cur_start)) {
1127 DCHECK(cur_cover == NULL);
1128 cur_cover = cur_range;
1129 }
1130 if (cur_range->CanCover(pred_end)) {
1131 DCHECK(pred_cover == NULL);
1132 pred_cover = cur_range;
1133 }
1134 cur_range = cur_range->next();
1135 }
1136
1137 if (cur_cover->IsSpilled()) return;
1138 DCHECK(pred_cover != NULL && cur_cover != NULL);
1139 if (pred_cover != cur_cover) {
1140 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
1141 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
1142 if (!pred_op->Equals(cur_op)) {
1143 LGap* gap = NULL;
1144 if (block->predecessors()->length() == 1) {
1145 gap = GapAt(block->first_instruction_index());
1146 } else {
1147 DCHECK(pred->end()->SecondSuccessor() == NULL);
1148 gap = GetLastGap(pred);
1149
1150 // We are going to insert a move before the branch instruction.
1151 // Some branch instructions (e.g. loops' back edges)
1152 // can potentially cause a GC so they have a pointer map.
1153 // By inserting a move we essentially create a copy of a
1154 // value which is invisible to PopulatePointerMaps(), because we store
1155 // it into a location different from the operand of a live range
1156 // covering a branch instruction.
1157 // Thus we need to manually record a pointer.
1158 LInstruction* branch = InstructionAt(pred->last_instruction_index());
1159 if (branch->HasPointerMap()) {
1160 if (HasTaggedValue(range->id())) {
1161 branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
1162 } else if (!cur_op->IsDoubleStackSlot() &&
1163 !cur_op->IsDoubleRegister()) {
1164 branch->pointer_map()->RemovePointer(cur_op);
1165 }
1166 }
1167 }
1168 gap->GetOrCreateParallelMove(
1169 LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
1170 chunk()->zone());
1171 }
1172 }
1173}
1174
1175
1176LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1177 int index = pos.InstructionIndex();
1178 if (IsGapAt(index)) {
1179 LGap* gap = GapAt(index);
1180 return gap->GetOrCreateParallelMove(
1181 pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
1182 }
1183 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1184 return GapAt(gap_pos)->GetOrCreateParallelMove(
1185 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
1186}
1187
1188
1189HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1190 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1191 return gap->block();
1192}
1193
1194
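// Inserts gap moves between consecutive children of a split live range
// when one ends exactly where the next begins and the transition is not a
// block boundary left to ResolveControlFlow().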
1195void LAllocator::ConnectRanges() {
1196 LAllocatorPhase phase("L_Connect ranges", this);
1197 for (int i = 0; i < live_ranges()->length(); ++i) {
1198 LiveRange* first_range = live_ranges()->at(i);
1199 if (first_range == NULL || first_range->parent() != NULL) continue;
1200
1201 LiveRange* second_range = first_range->next();
1202 while (second_range != NULL) {
1203 LifetimePosition pos = second_range->Start();
1204
1205 if (!second_range->IsSpilled()) {
1206 // Add gap move if the two live ranges touch and there is no block
1207 // boundary.
1208 if (first_range->End().Value() == pos.Value()) {
1209 bool should_insert = true;
1210 if (IsBlockBoundary(pos)) {
1211 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1212 }
1213 if (should_insert) {
1214 LParallelMove* move = GetConnectingParallelMove(pos);
1215 LOperand* prev_operand = first_range->CreateAssignedOperand(
1216 chunk()->zone());
1217 LOperand* cur_operand = second_range->CreateAssignedOperand(
1218 chunk()->zone());
1219 move->AddMove(prev_operand, cur_operand,
1220 chunk()->zone());
1221 }
1222 }
1223 }
1224
1225 first_range = second_range;
1226 second_range = second_range->next();
1227 }
1228 }
1229}
1230
1231
1232bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1233 if (block->predecessors()->length() != 1) return false;
1234 return block->predecessors()->first()->block_id() == block->block_id() - 1;
1235}
1236
1237
1238void LAllocator::ResolveControlFlow() {
1239 LAllocatorPhase phase("L_Resolve control flow", this);
1240 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1241 for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1242 HBasicBlock* block = blocks->at(block_id);
1243 if (CanEagerlyResolveControlFlow(block)) continue;
1244 BitVector* live = live_in_sets_[block->block_id()];
1245 BitVector::Iterator iterator(live);
1246 while (!iterator.Done()) {
1247 int operand_index = iterator.Current();
1248 for (int i = 0; i < block->predecessors()->length(); ++i) {
1249 HBasicBlock* cur = block->predecessors()->at(i);
1250 LiveRange* cur_range = LiveRangeFor(operand_index);
1251 ResolveControlFlow(cur_range, block, cur);
1252 }
1253 iterator.Advance();
1254 }
1255 }
1256}
1257
1258
1259void LAllocator::BuildLiveRanges() {
1260 LAllocatorPhase phase("L_Build live ranges", this);
1261 InitializeLivenessAnalysis();
1262 // Process the blocks in reverse order.
1263 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1264 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1265 HBasicBlock* block = blocks->at(block_id);
1266 BitVector* live = ComputeLiveOut(block);
1267 // Initially consider all live_out values live for the entire block. We
1268 // will shorten these intervals if necessary.
1269 AddInitialIntervals(block, live);
1270
1271 // Process the instructions in reverse order, generating and killing
1272 // live values.
1273 ProcessInstructions(block, live);
1274 // All phi output operands are killed by this block.
1275 const ZoneList<HPhi*>* phis = block->phis();
1276 for (int i = 0; i < phis->length(); ++i) {
1277 // The live range interval already ends at the first instruction of the
1278 // block.
1279 HPhi* phi = phis->at(i);
1280 live->Remove(phi->id());
1281
1282 LOperand* hint = NULL;
1283 LOperand* phi_operand = NULL;
1284 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1285 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
1286 chunk()->zone());
1287 for (int j = 0; j < move->move_operands()->length(); ++j) {
1288 LOperand* to = move->move_operands()->at(j).destination();
1289 if (to->IsUnallocated() &&
1290 LUnallocated::cast(to)->virtual_register() == phi->id()) {
1291 hint = move->move_operands()->at(j).source();
1292 phi_operand = to;
1293 break;
1294 }
1295 }
1296 DCHECK(hint != NULL);
1297
1298 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1299 block->first_instruction_index());
1300 Define(block_start, phi_operand, hint);
1301 }
1302
1303 // Now live is live_in for this block except not including values live
1304 // out on backward successor edges.
1305 live_in_sets_[block_id] = live;
1306
1307 // If this block is a loop header go back and patch up the necessary
1308 // predecessor blocks.
1309 if (block->IsLoopHeader()) {
1310 // TODO(kmillikin): Need to be able to get the last block of the loop
1311 // in the loop information. Add a live range stretching from the first
1312 // loop instruction to the last for each value live on entry to the
1313 // header.
1314 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1315 BitVector::Iterator iterator(live);
1316 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1317 block->first_instruction_index());
1318 LifetimePosition end = LifetimePosition::FromInstructionIndex(
1319 back_edge->last_instruction_index()).NextInstruction();
1320 while (!iterator.Done()) {
1321 int operand_index = iterator.Current();
1322 LiveRange* range = LiveRangeFor(operand_index);
1323 range->EnsureInterval(start, end, zone());
1324 iterator.Advance();
1325 }
1326
1327 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1328 live_in_sets_[i]->Union(*live);
1329 }
1330 }
1331
1332#ifdef DEBUG
1333 if (block_id == 0) {
1334 BitVector::Iterator iterator(live);
1335 bool found = false;
1336 while (!iterator.Done()) {
1337 found = true;
1338 int operand_index = iterator.Current();
1339 {
1340 AllowHandleDereference allow_deref;
1341 PrintF("Function: %s\n", chunk_->info()->GetDebugName().get());
1342 }
1343 PrintF("Value %d used before first definition!\n", operand_index);
1344 LiveRange* range = LiveRangeFor(operand_index);
1345 PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1346 iterator.Advance();
1347 }
1348 DCHECK(!found);
1349 }
1350#endif
1351 }
1352
1353 for (int i = 0; i < live_ranges_.length(); ++i) {
1354 if (live_ranges_[i] != NULL) {
1355 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1356 }
1357 }
1358}
1359
1360
1361bool LAllocator::SafePointsAreInOrder() const {
1362 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1363 int safe_point = 0;
1364 for (int i = 0; i < pointer_maps->length(); ++i) {
1365 LPointerMap* map = pointer_maps->at(i);
1366 if (safe_point > map->lithium_position()) return false;
1367 safe_point = map->lithium_position();
1368 }
1369 return true;
1370}
1371
1372
1373void LAllocator::PopulatePointerMaps() {
1374 LAllocatorPhase phase("L_Populate pointer maps", this);
1375 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1376
1377 DCHECK(SafePointsAreInOrder());
1378
1379 // Iterate over all safe point positions and record a pointer
1380 // for all spilled live ranges at this point.
1381 int first_safe_point_index = 0;
1382 int last_range_start = 0;
1383 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1384 LiveRange* range = live_ranges()->at(range_idx);
1385 if (range == NULL) continue;
1386 // Iterate over the first parts of multi-part live ranges.
1387 if (range->parent() != NULL) continue;
1388 // Skip non-pointer values.
1389 if (!HasTaggedValue(range->id())) continue;
1390 // Skip empty live ranges.
1391 if (range->IsEmpty()) continue;
1392
1393 // Find the extent of the range and its children.
1394 int start = range->Start().InstructionIndex();
1395 int end = 0;
1396 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1397 LifetimePosition this_end = cur->End();
1398 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1399 DCHECK(cur->Start().InstructionIndex() >= start);
1400 }
1401
1402 // Most of the ranges are in order, but not all. Keep an eye on when
1403 // they step backwards and reset the first_safe_point_index so we don't
1404 // miss any safe points.
1405 if (start < last_range_start) {
1406 first_safe_point_index = 0;
1407 }
1408 last_range_start = start;
1409
1410 // Step across all the safe points that are before the start of this range,
1411 // recording how far we step in order to save doing this for the next range.
1412 while (first_safe_point_index < pointer_maps->length()) {
1413 LPointerMap* map = pointer_maps->at(first_safe_point_index);
1414 int safe_point = map->lithium_position();
1415 if (safe_point >= start) break;
1416 first_safe_point_index++;
1417 }
1418
1419 // Step through the safe points to see whether they are in the range.
1420 for (int safe_point_index = first_safe_point_index;
1421 safe_point_index < pointer_maps->length();
1422 ++safe_point_index) {
1423 LPointerMap* map = pointer_maps->at(safe_point_index);
1424 int safe_point = map->lithium_position();
1425
1426 // The safe points are sorted so we can stop searching here.
1427 if (safe_point - 1 > end) break;
1428
1429 // Advance to the next active range that covers the current
1430 // safe point position.
1431 LifetimePosition safe_point_pos =
1432 LifetimePosition::FromInstructionIndex(safe_point);
1433 LiveRange* cur = range;
1434 while (cur != NULL && !cur->Covers(safe_point_pos)) {
1435 cur = cur->next();
1436 }
1437 if (cur == NULL) continue;
1438
1439 // Check if the live range is spilled and the safe point is after
1440 // the spill position.
1441 if (range->HasAllocatedSpillOperand() &&
1442 safe_point >= range->spill_start_index()) {
1443 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1444 range->id(), range->spill_start_index(), safe_point);
1445 map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
1446 }
1447
1448 if (!cur->IsSpilled()) {
1449 TraceAlloc("Pointer in register for range %d (start at %d) "
1450 "at safe point %d\n",
1451 cur->id(), cur->Start().Value(), safe_point);
1452 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
1453 DCHECK(!operand->IsStackSlot());
1454 map->RecordPointer(operand, chunk()->zone());
1455 }
1456 }
1457 }
1458}
1459
1460
1461void LAllocator::AllocateGeneralRegisters() {
1462 LAllocatorPhase phase("L_Allocate general registers", this);
1463 num_registers_ =
1464 RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
1465 ->num_allocatable_general_registers();
1466 allocatable_register_codes_ =
1467 RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
1468 ->allocatable_general_codes();
1469 mode_ = GENERAL_REGISTERS;
1470 AllocateRegisters();
1471}
1472
1473
1474void LAllocator::AllocateDoubleRegisters() {
1475 LAllocatorPhase phase("L_Allocate double registers", this);
1476 num_registers_ =
1477 RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
1478 ->num_allocatable_double_registers();
1479 allocatable_register_codes_ =
1480 RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
1481 ->allocatable_double_codes();
1482 mode_ = DOUBLE_REGISTERS;
1483 AllocateRegisters();
1484}
1485
1486
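// The core linear-scan loop for the current register kind (mode_):
// unhandled ranges are processed in order of increasing start position,
// fixed ranges start out inactive, and each range is first offered a free
// register; if none is available, the blocked-register path may split or
// spill ranges.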
1487void LAllocator::AllocateRegisters() {
1488 DCHECK(unhandled_live_ranges_.is_empty());
1489
1490 for (int i = 0; i < live_ranges_.length(); ++i) {
1491 if (live_ranges_[i] != NULL) {
1492 if (live_ranges_[i]->Kind() == mode_) {
1493 AddToUnhandledUnsorted(live_ranges_[i]);
1494 }
1495 }
1496 }
1497 SortUnhandled();
1498 DCHECK(UnhandledIsSorted());
1499
1500 DCHECK(reusable_slots_.is_empty());
1501 DCHECK(active_live_ranges_.is_empty());
1502 DCHECK(inactive_live_ranges_.is_empty());
1503
1504 if (mode_ == DOUBLE_REGISTERS) {
1505 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1506 LiveRange* current = fixed_double_live_ranges_.at(i);
1507 if (current != NULL) {
1508 AddToInactive(current);
1509 }
1510 }
1511 } else {
1512 DCHECK(mode_ == GENERAL_REGISTERS);
1513 for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1514 LiveRange* current = fixed_live_ranges_.at(i);
1515 if (current != NULL) {
1516 AddToInactive(current);
1517 }
1518 }
1519 }
1520
1521 while (!unhandled_live_ranges_.is_empty()) {
1522 DCHECK(UnhandledIsSorted());
1523 LiveRange* current = unhandled_live_ranges_.RemoveLast();
1524 DCHECK(UnhandledIsSorted());
1525 LifetimePosition position = current->Start();
1526#ifdef DEBUG
1527 allocation_finger_ = position;
1528#endif
1529 TraceAlloc("Processing interval %d start=%d\n",
1530 current->id(),
1531 position.Value());
1532
1533 if (current->HasAllocatedSpillOperand()) {
1534 TraceAlloc("Live range %d already has a spill operand\n", current->id());
1535 LifetimePosition next_pos = position;
1536 if (IsGapAt(next_pos.InstructionIndex())) {
1537 next_pos = next_pos.NextInstruction();
1538 }
1539 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1540 // If the range already has a spill operand and it doesn't need a
1541 // register immediately, split it and spill the first part of the range.
1542 if (pos == NULL) {
1543 Spill(current);
1544 continue;
1545 } else if (pos->pos().Value() >
1546 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
1549 SpillBetween(current, current->Start(), pos->pos());
1550 if (!AllocationOk()) return;
1551 DCHECK(UnhandledIsSorted());
1552 continue;
1553 }
1554 }
1555
1556 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1557 LiveRange* cur_active = active_live_ranges_.at(i);
1558 if (cur_active->End().Value() <= position.Value()) {
1559 ActiveToHandled(cur_active);
1560 --i; // The live range was removed from the list of active live ranges.
1561 } else if (!cur_active->Covers(position)) {
1562 ActiveToInactive(cur_active);
1563 --i; // The live range was removed from the list of active live ranges.
1564 }
1565 }
1566
1567 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1568 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1569 if (cur_inactive->End().Value() <= position.Value()) {
1570 InactiveToHandled(cur_inactive);
1571 --i; // Live range was removed from the list of inactive live ranges.
1572 } else if (cur_inactive->Covers(position)) {
1573 InactiveToActive(cur_inactive);
1574 --i; // Live range was removed from the list of inactive live ranges.
1575 }
1576 }
1577
1578 DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());
1579
1580 bool result = TryAllocateFreeReg(current);
1581 if (!AllocationOk()) return;
1582
1583 if (!result) AllocateBlockedReg(current);
1584 if (!AllocationOk()) return;
1585
1586 if (current->HasRegisterAssigned()) {
1587 AddToActive(current);
1588 }
1589 }
1590
1591 reusable_slots_.Rewind(0);
1592 active_live_ranges_.Rewind(0);
1593 inactive_live_ranges_.Rewind(0);
1594}
1595
1596
1597const char* LAllocator::RegisterName(int allocation_index) {
1598 if (mode_ == GENERAL_REGISTERS) {
1599 return Register::from_code(allocation_index).ToString();
1600 } else {
1601 return DoubleRegister::from_code(allocation_index).ToString();
1602 }
1603}
1604
1605
1606void LAllocator::TraceAlloc(const char* msg, ...) {
1607 if (FLAG_trace_alloc) {
1608 va_list arguments;
1609 va_start(arguments, msg);
1610 base::OS::VPrint(msg, arguments);
1611 va_end(arguments);
1612 }
1613}
1614
1615
1616bool LAllocator::HasTaggedValue(int virtual_register) const {
1617 HValue* value = graph_->LookupValue(virtual_register);
1618 if (value == NULL) return false;
1619 return value->representation().IsTagged() && !value->type().IsSmi();
1620}
1621
1622
1623RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1624 if (virtual_register < first_artificial_register_) {
1625 HValue* value = graph_->LookupValue(virtual_register);
1626 if (value != NULL && value->representation().IsDouble()) {
1627 return DOUBLE_REGISTERS;
1628 }
1629 } else if (double_artificial_registers_.Contains(
1630 virtual_register - first_artificial_register_)) {
1631 return DOUBLE_REGISTERS;
1632 }
1633
1634 return GENERAL_REGISTERS;
1635}
1636
1637
1638void LAllocator::AddToActive(LiveRange* range) {
1639 TraceAlloc("Add live range %d to active\n", range->id());
1640 active_live_ranges_.Add(range, zone());
1641}
1642
1643
1644void LAllocator::AddToInactive(LiveRange* range) {
1645 TraceAlloc("Add live range %d to inactive\n", range->id());
1646 inactive_live_ranges_.Add(range, zone());
1647}
1648
1649
1650void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1651 if (range == NULL || range->IsEmpty()) return;
1652 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1653 DCHECK(allocation_finger_.Value() <= range->Start().Value());
1654 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1655 LiveRange* cur_range = unhandled_live_ranges_.at(i);
1656 if (range->ShouldBeAllocatedBefore(cur_range)) {
1657 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1658 unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1659 DCHECK(UnhandledIsSorted());
1660 return;
1661 }
1662 }
1663 TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1664 unhandled_live_ranges_.InsertAt(0, range, zone());
1665 DCHECK(UnhandledIsSorted());
1666}
1667
1668
1669void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1670 if (range == NULL || range->IsEmpty()) return;
1671 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1672 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1673 unhandled_live_ranges_.Add(range, zone());
1674}
1675
1676
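// Comparator used by SortUnhandled(): a range that should be allocated before
// another one is placed later in the list, with ids used as a tie-breaker to
// keep the order deterministic.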
1677static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1678 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
1679 !(*b)->ShouldBeAllocatedBefore(*a));
1680 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1681 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1682 return (*a)->id() - (*b)->id();
1683}
1684
1685
1686// Sort the unhandled live ranges so that the ranges to be processed first are
1687// at the end of the array list. This is convenient for the register allocation
1688// algorithm because it is efficient to remove elements from the end.
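// For example, ranges starting at positions 8, 2 and 5 end up ordered as
// [8, 5, 2], so the range starting at 2 is popped from the end first.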
1689void LAllocator::SortUnhandled() {
1690 TraceAlloc("Sort unhandled\n");
1691 unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1692}
1693
1694
1695bool LAllocator::UnhandledIsSorted() {
1696 int len = unhandled_live_ranges_.length();
1697 for (int i = 1; i < len; i++) {
1698 LiveRange* a = unhandled_live_ranges_.at(i - 1);
1699 LiveRange* b = unhandled_live_ranges_.at(i);
1700 if (a->Start().Value() < b->Start().Value()) return false;
1701 }
1702 return true;
1703}
1704
1705
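// Called when a range is retired: if it is the last range of its value and a
// stack slot has been assigned, remember the slot so that a later,
// non-overlapping range can reuse it.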
1706void LAllocator::FreeSpillSlot(LiveRange* range) {
1707 // Only the last live range of a value may release its spill slot for reuse.
1708 if (range->next() != NULL) return;
1709
1710 if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1711
1712 int index = range->TopLevel()->GetSpillOperand()->index();
1713 if (index >= 0) {
1714 reusable_slots_.Add(range, zone());
1715 }
1716}
1717
1718
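// Hands out the spill slot of a previously retired range, provided that range
// ended before the current one starts; only the head of reusable_slots_ is
// considered.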
1719LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1720 if (reusable_slots_.is_empty()) return NULL;
1721 if (reusable_slots_.first()->End().Value() >
1722 range->TopLevel()->Start().Value()) {
1723 return NULL;
1724 }
1725 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1726 reusable_slots_.Remove(0);
1727 return result;
1728}
1729
1730
1731void LAllocator::ActiveToHandled(LiveRange* range) {
1732 DCHECK(active_live_ranges_.Contains(range));
1733 active_live_ranges_.RemoveElement(range);
1734 TraceAlloc("Moving live range %d from active to handled\n", range->id());
1735 FreeSpillSlot(range);
1736}
1737
1738
1739void LAllocator::ActiveToInactive(LiveRange* range) {
1740 DCHECK(active_live_ranges_.Contains(range));
1741 active_live_ranges_.RemoveElement(range);
1742 inactive_live_ranges_.Add(range, zone());
1743 TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1744}
1745
1746
1747void LAllocator::InactiveToHandled(LiveRange* range) {
1748 DCHECK(inactive_live_ranges_.Contains(range));
1749 inactive_live_ranges_.RemoveElement(range);
1750 TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1751 FreeSpillSlot(range);
1752}
1753
1754
1755void LAllocator::InactiveToActive(LiveRange* range) {
1756 DCHECK(inactive_live_ranges_.Contains(range));
1757 inactive_live_ranges_.RemoveElement(range);
1758 active_live_ranges_.Add(range, zone());
1759 TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1760}
1761
1762
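// Standard linear-scan step: for every register compute the position up to
// which it stays free with respect to the active and inactive ranges, then
// prefer the hinted register if it is free long enough, otherwise pick the
// register that stays free the longest. Returns false if every register is
// already blocked at the start of the current range.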
1763bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1764 DCHECK(DoubleRegister::kMaxNumRegisters >= Register::kNumRegisters);
1765
1766 LifetimePosition free_until_pos[DoubleRegister::kMaxNumRegisters];
1767
1768 for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
1769 free_until_pos[i] = LifetimePosition::MaxPosition();
1770 }
1771
1772 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1773 LiveRange* cur_active = active_live_ranges_.at(i);
1774 free_until_pos[cur_active->assigned_register()] =
1775 LifetimePosition::FromInstructionIndex(0);
1776 }
1777
1778 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1779 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1780 DCHECK(cur_inactive->End().Value() > current->Start().Value());
1781 LifetimePosition next_intersection =
1782 cur_inactive->FirstIntersection(current);
1783 if (!next_intersection.IsValid()) continue;
1784 int cur_reg = cur_inactive->assigned_register();
1785 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1786 }
1787
1788 LOperand* hint = current->FirstHint();
1789 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
1790 int register_index = hint->index();
1791 TraceAlloc(
1792 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1793 RegisterName(register_index),
1794 free_until_pos[register_index].Value(),
1795 current->id(),
1796 current->End().Value());
1797
1798 // The desired register is free until the end of the current live range.
1799 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1800 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1801 RegisterName(register_index),
1802 current->id());
1803 SetLiveRangeAssignedRegister(current, register_index);
1804 return true;
1805 }
1806 }
1807
1808 // Find the register which stays free for the longest time.
1809 int reg = allocatable_register_codes_[0];
1810 for (int i = 1; i < RegisterCount(); ++i) {
1811 int code = allocatable_register_codes_[i];
1812 if (free_until_pos[code].Value() > free_until_pos[reg].Value()) {
1813 reg = code;
1814 }
1815 }
1816
1817 LifetimePosition pos = free_until_pos[reg];
1818
1819 if (pos.Value() <= current->Start().Value()) {
1820 // All registers are blocked.
1821 return false;
1822 }
1823
1824 if (pos.Value() < current->End().Value()) {
1825 // Register reg is available at the range start but becomes blocked before
1826 // the range end. Split current at the position where it becomes blocked.
1827 LiveRange* tail = SplitRangeAt(current, pos);
1828 if (!AllocationOk()) return false;
1829 AddToUnhandledSorted(tail);
1830 }
1831
1832
1833 // Register reg is available at the range start and is free until
1834 // the range end.
1835 DCHECK(pos.Value() >= current->End().Value());
1836 TraceAlloc("Assigning free reg %s to live range %d\n",
1837 RegisterName(reg),
1838 current->id());
1839 SetLiveRangeAssignedRegister(current, reg);
1840
1841 return true;
1842}
1843
1844
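// Fallback for when no register is free at the start of the current range:
// pick the register whose next use by other ranges is furthest away and evict
// the conflicting parts of those ranges, or spill the start of the current
// range itself if every register is needed before its first register use.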
1845void LAllocator::AllocateBlockedReg(LiveRange* current) {
1846 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1847 if (register_use == NULL) {
1848 // There is no use in the current live range that requires a register.
1849 // We can just spill it.
1850 Spill(current);
1851 return;
1852 }
1853
1854
1855 LifetimePosition use_pos[DoubleRegister::kMaxNumRegisters];
1856 LifetimePosition block_pos[DoubleRegister::kMaxNumRegisters];
1857
1858 for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
1859 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1860 }
1861
1862 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1863 LiveRange* range = active_live_ranges_[i];
1864 int cur_reg = range->assigned_register();
1865 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1866 block_pos[cur_reg] = use_pos[cur_reg] =
1867 LifetimePosition::FromInstructionIndex(0);
1868 } else {
1869 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1870 current->Start());
1871 if (next_use == NULL) {
1872 use_pos[cur_reg] = range->End();
1873 } else {
1874 use_pos[cur_reg] = next_use->pos();
1875 }
1876 }
1877 }
1878
1879 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1880 LiveRange* range = inactive_live_ranges_.at(i);
1881 DCHECK(range->End().Value() > current->Start().Value());
1882 LifetimePosition next_intersection = range->FirstIntersection(current);
1883 if (!next_intersection.IsValid()) continue;
1884 int cur_reg = range->assigned_register();
1885 if (range->IsFixed()) {
1886 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1887 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1888 } else {
1889 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1890 }
1891 }
1892
1893 int reg = allocatable_register_codes_[0];
1894 for (int i = 1; i < RegisterCount(); ++i) {
1895 int code = allocatable_register_codes_[i];
1896 if (use_pos[code].Value() > use_pos[reg].Value()) {
1897 reg = code;
1898 }
1899 }
1900
1901 LifetimePosition pos = use_pos[reg];
1902
1903 if (pos.Value() < register_use->pos().Value()) {
1904 // All registers are blocked before the first use that requires a register.
1905 // Spill the starting part of the live range up to that use.
1906 SpillBetween(current, current->Start(), register_use->pos());
1907 return;
1908 }
1909
1910 if (block_pos[reg].Value() < current->End().Value()) {
1911 // The register becomes blocked before the current range ends. Split before
1912 // that position.
1913 LiveRange* tail = SplitBetween(current,
1914 current->Start(),
1915 block_pos[reg].InstructionStart());
1916 if (!AllocationOk()) return;
1917 AddToUnhandledSorted(tail);
1918 }
1919
1920 // Register reg is not blocked for the whole range.
1921 DCHECK(block_pos[reg].Value() >= current->End().Value());
1922 TraceAlloc("Assigning blocked reg %s to live range %d\n",
1923 RegisterName(reg),
1924 current->id());
1925 SetLiveRangeAssignedRegister(current, reg);
1926
1927 // This register was not free. Thus we need to find and spill
1928 // parts of active and inactive live ranges that use the same register
1929 // at the same lifetime positions as current.
1930 SplitAndSpillIntersecting(current);
1931}
1932
1933
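// Chooses where spilling of the range should start: if the given position is
// inside a loop and the range has no register-beneficial use between the loop
// header and that position, the spill is hoisted to the loop header, which
// reduces the number of memory moves on the loop back edge.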
1934LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
1935 LifetimePosition pos) {
1936 HBasicBlock* block = GetBlock(pos.InstructionStart());
1937 HBasicBlock* loop_header =
1938 block->IsLoopHeader() ? block : block->parent_loop_header();
1939
1940 if (loop_header == NULL) return pos;
1941
1942 UsePosition* prev_use =
1943 range->PreviousUsePositionRegisterIsBeneficial(pos);
1944
1945 while (loop_header != NULL) {
1946 // We are going to spill the live range inside the loop.
1947 // If possible, try to move the spilling position backwards to the loop
1948 // header. This will reduce the number of memory moves on the back edge.
1949 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1950 loop_header->first_instruction_index());
1951
1952 if (range->Covers(loop_start)) {
1953 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1954 // No register-beneficial use inside the loop before pos.
1955 pos = loop_start;
1956 }
1957 }
1958
1959 // Try hoisting out to an outer loop.
1960 loop_header = loop_header->parent_loop_header();
1961 }
1962
1963 return pos;
1964}
1965
1966
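// After the current range has been force-assigned a register, evict every
// active or inactive range that occupies the same register where it overlaps
// the current range: the overlapping part is split off and spilled, and the
// evicted ranges are moved to the handled set.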
1967void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1968 DCHECK(current->HasRegisterAssigned());
1969 int reg = current->assigned_register();
1970 LifetimePosition split_pos = current->Start();
1971 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1972 LiveRange* range = active_live_ranges_[i];
1973 if (range->assigned_register() == reg) {
1974 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1975 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
1976 if (next_pos == NULL) {
1977 SpillAfter(range, spill_pos);
1978 } else {
1979 // When spilling between spill_pos and next_pos, ensure that the range
1980 // remains spilled at least until the start of the current live range.
1981 // This guarantees that we will not introduce new unhandled ranges that
1982 // start before the current range, as that would violate the allocation
1983 // invariant and lead to an inconsistent state of the active and inactive
1984 // live ranges: ranges are allocated in order of their start positions,
1985 // and ranges are retired from active/inactive once the start of the
1986 // current live range is larger than their end.
1987 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
1988 }
1989 if (!AllocationOk()) return;
1990 ActiveToHandled(range);
1991 --i;
1992 }
1993 }
1994
1995 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1996 LiveRange* range = inactive_live_ranges_[i];
1997 DCHECK(range->End().Value() > current->Start().Value());
1998 if (range->assigned_register() == reg && !range->IsFixed()) {
1999 LifetimePosition next_intersection = range->FirstIntersection(current);
2000 if (next_intersection.IsValid()) {
2001 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2002 if (next_pos == NULL) {
2003 SpillAfter(range, split_pos);
2004 } else {
2005 next_intersection = Min(next_intersection, next_pos->pos());
2006 SpillBetween(range, split_pos, next_intersection);
2007 }
2008 if (!AllocationOk()) return;
2009 InactiveToHandled(range);
2010 --i;
2011 }
2012 }
2013 }
2014}
2015
2016
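// A lifetime position is a block boundary if it is the start of a label
// instruction, i.e. the first instruction of a basic block.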
2017bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
2018 return pos.IsInstructionStart() &&
2019 InstructionAt(pos.InstructionIndex())->IsLabel();
2020}
2021
2022
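// Splits the range at the given position and returns the tail part, which is
// given a fresh virtual register. Splitting at or before the range start is a
// no-op that returns the range itself.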
2023LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
2024 DCHECK(!range->IsFixed());
2025 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2026
2027 if (pos.Value() <= range->Start().Value()) return range;
2028
2029 // We can't properly connect live ranges if the split occurred at the end
2030 // of a control instruction.
2031 DCHECK(pos.IsInstructionStart() ||
2032 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
2033
2034 int vreg = GetVirtualRegister();
2035 if (!AllocationOk()) return NULL;
2036 LiveRange* result = LiveRangeFor(vreg);
2037 range->SplitAt(pos, result, zone());
2038 return result;
2039}
2040
2041
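// Splits the range at a "good" position somewhere in [start, end], as chosen
// by FindOptimalSplitPos below.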
2042LiveRange* LAllocator::SplitBetween(LiveRange* range,
2043 LifetimePosition start,
2044 LifetimePosition end) {
2045 DCHECK(!range->IsFixed());
2046 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2047 range->id(),
2048 start.Value(),
2049 end.Value());
2050
2051 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2052 DCHECK(split_pos.Value() >= start.Value());
2053 return SplitRangeAt(range, split_pos);
2054}
2055
2056
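// Picks a split position in [start, end]: as late as possible if both
// positions are in the same block, otherwise at the start of the outermost
// loop header between them, so that the moves introduced by the split are
// executed as rarely as possible.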
2057LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2058 LifetimePosition end) {
2059 int start_instr = start.InstructionIndex();
2060 int end_instr = end.InstructionIndex();
2061 DCHECK(start_instr <= end_instr);
2062
2063 // We have no choice: both positions are within the same instruction.
2064 if (start_instr == end_instr) return end;
2065
2066 HBasicBlock* start_block = GetBlock(start);
2067 HBasicBlock* end_block = GetBlock(end);
2068
2069 if (end_block == start_block) {
2070 // Both positions are in the same basic block. Split at the latest
2071 // possible position.
2072 return end;
2073 }
2074
2075 HBasicBlock* block = end_block;
2076 // Find the header of the outermost loop that does not contain start_block.
2077 while (block->parent_loop_header() != NULL &&
2078 block->parent_loop_header()->block_id() > start_block->block_id()) {
2079 block = block->parent_loop_header();
2080 }
2081
2082 // We did not find any suitable outer loop. Split at the latest possible
2083 // position unless end_block is a loop header itself.
2084 if (block == end_block && !end_block->IsLoopHeader()) return end;
2085
2086 return LifetimePosition::FromInstructionIndex(
2087 block->first_instruction_index());
2088}
2089
2090
2091void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2092 LiveRange* second_part = SplitRangeAt(range, pos);
2093 if (!AllocationOk()) return;
2094 Spill(second_part);
2095}
2096
2097
2098void LAllocator::SpillBetween(LiveRange* range,
2099 LifetimePosition start,
2100 LifetimePosition end) {
2101 SpillBetweenUntil(range, start, start, end);
2102}
2103
2104
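// Splits the range at start; if the tail still intersects [start, end[, it is
// split once more somewhere in [until, end[, the middle piece is spilled and
// the remainder is re-queued as unhandled. Otherwise the whole tail is simply
// re-queued.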
2105void LAllocator::SpillBetweenUntil(LiveRange* range,
2106 LifetimePosition start,
2107 LifetimePosition until,
2108 LifetimePosition end) {
2109 CHECK(start.Value() < end.Value());
2110 LiveRange* second_part = SplitRangeAt(range, start);
2111 if (!AllocationOk()) return;
2112
2113 if (second_part->Start().Value() < end.Value()) {
2114 // The split result intersects with [start, end[.
2115 // Split it at a position in ]start+1, end[, spill the middle part
2116 // and add the rest to the unhandled list.
2117 LiveRange* third_part = SplitBetween(
2118 second_part,
2119 Max(second_part->Start().InstructionEnd(), until),
2120 end.PrevInstruction().InstructionEnd());
2121 if (!AllocationOk()) return;
2122
2123 DCHECK(third_part != second_part);
2124
2125 Spill(second_part);
2126 AddToUnhandledSorted(third_part);
2127 } else {
2128 // The split result does not intersect with [start, end[.
2129 // Nothing to spill. Just add it to the unhandled list as a whole.
2130 AddToUnhandledSorted(second_part);
2131 }
2132}
2133
2134
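// Assigns (or reuses) a stack slot for the range's top-level value and marks
// this part of the range as living in that slot.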
2135void LAllocator::Spill(LiveRange* range) {
2136 DCHECK(!range->IsSpilled());
2137 TraceAlloc("Spilling live range %d\n", range->id());
2138 LiveRange* first = range->TopLevel();
2139
2140 if (!first->HasAllocatedSpillOperand()) {
2141 LOperand* op = TryReuseSpillSlot(range);
2142 if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
2143 first->SetSpillOperand(op);
2144 }
2145 range->MakeSpilled(chunk()->zone());
2146}
2147
2148
2149int LAllocator::RegisterCount() const {
2150 return num_registers_;
2151}
2152
2153
2154#ifdef DEBUG
2155
2156
2157void LAllocator::Verify() const {
2158 for (int i = 0; i < live_ranges()->length(); ++i) {
2159 LiveRange* current = live_ranges()->at(i);
2160 if (current != NULL) current->Verify();
2161 }
2162}
2163
2164
2165#endif
2166
2167
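// When FLAG_hydrogen_stats is set, the phase records the allocator zone size
// on entry so that the destructor can report how much memory register
// allocation consumed.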
2168LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
2169 : CompilationPhase(name, allocator->graph()->info()),
2170 allocator_(allocator) {
2171 if (FLAG_hydrogen_stats) {
2172 allocator_zone_start_allocation_size_ =
2173 allocator->zone()->allocation_size();
2174 }
2175}
2176
2177
2178LAllocatorPhase::~LAllocatorPhase() {
2179 if (FLAG_hydrogen_stats) {
2180 size_t size = allocator_->zone()->allocation_size() -
2181 allocator_zone_start_allocation_size_;
2182 isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2183 }
2184
2185 if (ShouldProduceTraceOutput()) {
2186 isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
2187 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
2188 }
2189
2190#ifdef DEBUG
2191 if (allocator_ != NULL) allocator_->Verify();
2192#endif
2193}
2194
2195
2196} // namespace internal
2197} // namespace v8