1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "lithium-allocator.h"
29
30#include "data-flow.h"
31#include "hydrogen.h"
32#include "string-stream.h"
33
34#if V8_TARGET_ARCH_IA32
35#include "ia32/lithium-ia32.h"
36#elif V8_TARGET_ARCH_X64
37#include "x64/lithium-x64.h"
38#elif V8_TARGET_ARCH_ARM
39#include "arm/lithium-arm.h"
40#else
41#error "Unknown architecture."
42#endif
43
44namespace v8 {
45namespace internal {
46
47
48#define DEFINE_OPERAND_CACHE(name, type) \
49 name name::cache[name::kNumCachedOperands]; \
50 void name::SetupCache() { \
51 for (int i = 0; i < kNumCachedOperands; i++) { \
52 cache[i].ConvertTo(type, i); \
53 } \
54 }
55
56DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
57DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
58DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
59DEFINE_OPERAND_CACHE(LRegister, REGISTER)
60DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
61
62#undef DEFINE_OPERAND_CACHE
63
64
65static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
66 return a.Value() < b.Value() ? a : b;
67}
68
69
70static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
71 return a.Value() > b.Value() ? a : b;
72}
73
74
75void LOperand::PrintTo(StringStream* stream) {
76 LUnallocated* unalloc = NULL;
77 switch (kind()) {
78 case INVALID:
79 break;
80 case UNALLOCATED:
81 unalloc = LUnallocated::cast(this);
82 stream->Add("v%d", unalloc->virtual_register());
83 switch (unalloc->policy()) {
84 case LUnallocated::NONE:
85 break;
86 case LUnallocated::FIXED_REGISTER: {
87 const char* register_name =
88 Register::AllocationIndexToString(unalloc->fixed_index());
89 stream->Add("(=%s)", register_name);
90 break;
91 }
92 case LUnallocated::FIXED_DOUBLE_REGISTER: {
93 const char* double_register_name =
94 DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
95 stream->Add("(=%s)", double_register_name);
96 break;
97 }
98 case LUnallocated::FIXED_SLOT:
99 stream->Add("(=%dS)", unalloc->fixed_index());
100 break;
101 case LUnallocated::MUST_HAVE_REGISTER:
102 stream->Add("(R)");
103 break;
104 case LUnallocated::WRITABLE_REGISTER:
105 stream->Add("(WR)");
106 break;
107 case LUnallocated::SAME_AS_FIRST_INPUT:
108 stream->Add("(1)");
109 break;
110 case LUnallocated::SAME_AS_ANY_INPUT:
111 stream->Add("(A)");
112 break;
113 case LUnallocated::ANY:
114 stream->Add("(-)");
115 break;
116 case LUnallocated::IGNORE:
117 stream->Add("(0)");
118 break;
119 }
120 break;
121 case CONSTANT_OPERAND:
122 stream->Add("[constant:%d]", index());
123 break;
124 case STACK_SLOT:
125 stream->Add("[stack:%d]", index());
126 break;
127 case DOUBLE_STACK_SLOT:
128 stream->Add("[double_stack:%d]", index());
129 break;
130 case REGISTER:
131 stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
132 break;
133 case DOUBLE_REGISTER:
134 stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
135 break;
136 case ARGUMENT:
137 stream->Add("[arg:%d]", index());
138 break;
139 }
140}
141
142int LOperand::VirtualRegister() {
143 LUnallocated* unalloc = LUnallocated::cast(this);
144 return unalloc->virtual_register();
145}
146
147
148bool UsePosition::RequiresRegister() const {
149 return requires_reg_;
150}
151
152
153bool UsePosition::RegisterIsBeneficial() const {
154 return register_beneficial_;
155}
156
157
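// Splits this interval at the given position: this interval is truncated to
// end at |pos| and a new interval covering [pos, end[ is linked in after it.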
158void UseInterval::SplitAt(LifetimePosition pos) {
159 ASSERT(Contains(pos) && pos.Value() != start().Value());
160 UseInterval* after = new UseInterval(pos, end_);
161 after->next_ = next_;
162 next_ = after;
163 end_ = pos;
164}
165
166
167#ifdef DEBUG
168
169
170void LiveRange::Verify() const {
171 UsePosition* cur = first_pos_;
172 while (cur != NULL) {
173 ASSERT(Start().Value() <= cur->pos().Value() &&
174 cur->pos().Value() <= End().Value());
175 cur = cur->next();
176 }
177}
178
179
180bool LiveRange::HasOverlap(UseInterval* target) const {
181 UseInterval* current_interval = first_interval_;
182 while (current_interval != NULL) {
183 // Intervals overlap if the start of one is contained in the other.
184 if (current_interval->Contains(target->start()) ||
185 target->Contains(current_interval->start())) {
186 return true;
187 }
188 current_interval = current_interval->next();
189 }
190 return false;
191}
192
193
194#endif
195
196
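// Returns the first use position at or after |start|, caching the result to
// speed up repeated queries with non-decreasing positions.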
197UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
198 UsePosition* use_pos = last_processed_use_;
199 if (use_pos == NULL) use_pos = first_pos();
200 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
201 use_pos = use_pos->next();
202 }
203 last_processed_use_ = use_pos;
204 return use_pos;
205}
206
207
208UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
209 LifetimePosition start) {
210 UsePosition* pos = NextUsePosition(start);
211 while (pos != NULL && !pos->RegisterIsBeneficial()) {
212 pos = pos->next();
213 }
214 return pos;
215}
216
217
218UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
219 UsePosition* pos = NextUsePosition(start);
220 while (pos != NULL && !pos->RequiresRegister()) {
221 pos = pos->next();
222 }
223 return pos;
224}
225
226
227bool LiveRange::CanBeSpilled(LifetimePosition pos) {
228 // A range with a register assigned cannot be spilled at or before its start.
229 if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
230
231 // We cannot spill a live range that has a use requiring a register
232 // at the current or the immediate next position.
233 UsePosition* use_pos = NextRegisterPosition(pos);
234 if (use_pos == NULL) return true;
235 return use_pos->pos().Value() > pos.NextInstruction().Value();
236}
237
238
239UsePosition* LiveRange::FirstPosWithHint() const {
240 UsePosition* pos = first_pos_;
241 while (pos != NULL && !pos->HasHint()) pos = pos->next();
242 return pos;
243}
244
245
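// Builds an operand describing the current location of this live range:
// the assigned register, the spill operand, or an unallocated placeholder.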
246LOperand* LiveRange::CreateAssignedOperand() {
247 LOperand* op = NULL;
248 if (HasRegisterAssigned()) {
249 ASSERT(!IsSpilled());
250 if (IsDouble()) {
251 op = LDoubleRegister::Create(assigned_register());
252 } else {
253 op = LRegister::Create(assigned_register());
254 }
255 } else if (IsSpilled()) {
256 ASSERT(!HasRegisterAssigned());
257 op = TopLevel()->GetSpillOperand();
258 ASSERT(!op->IsUnallocated());
259 } else {
260 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
261 unalloc->set_virtual_register(id_);
262 op = unalloc;
263 }
264 return op;
265}
266
267
268UseInterval* LiveRange::FirstSearchIntervalForPosition(
269 LifetimePosition position) const {
270 if (current_interval_ == NULL) return first_interval_;
271 if (current_interval_->start().Value() > position.Value()) {
272 current_interval_ = NULL;
273 return first_interval_;
274 }
275 return current_interval_;
276}
277
278
279void LiveRange::AdvanceLastProcessedMarker(
280 UseInterval* to_start_of, LifetimePosition but_not_past) const {
281 if (to_start_of == NULL) return;
282 if (to_start_of->start().Value() > but_not_past.Value()) return;
283 LifetimePosition start =
284 current_interval_ == NULL ? LifetimePosition::Invalid()
285 : current_interval_->start();
286 if (to_start_of->start().Value() > start.Value()) {
287 current_interval_ = to_start_of;
288 }
289}
290
291
292void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
293 ASSERT(Start().Value() < position.Value());
294 ASSERT(result->IsEmpty());
295 // Find the last interval that ends before the position. If the
296 // position is contained in one of the intervals in the chain, we
297 // split that interval and use the first part.
298 UseInterval* current = FirstSearchIntervalForPosition(position);
299
300 // If the split position coincides with the beginning of a use interval
301 // we need to split use positions in a special way.
302 bool split_at_start = false;
303
304 while (current != NULL) {
305 if (current->Contains(position)) {
306 current->SplitAt(position);
307 break;
308 }
309 UseInterval* next = current->next();
310 if (next->start().Value() >= position.Value()) {
311 split_at_start = (next->start().Value() == position.Value());
312 break;
313 }
314 current = next;
315 }
316
317 // Partition original use intervals to the two live ranges.
318 UseInterval* before = current;
319 UseInterval* after = before->next();
320 result->last_interval_ = (last_interval_ == before)
321 ? after // Only interval in the range after split.
322 : last_interval_; // Last interval of the original range.
323 result->first_interval_ = after;
324 last_interval_ = before;
325
326 // Find the last use position before the split and the first use
327 // position after it.
328 UsePosition* use_after = first_pos_;
329 UsePosition* use_before = NULL;
330 if (split_at_start) {
331 // The split position coincides with the beginning of a use interval (the
332 // end of a lifetime hole). A use at this position should be attributed to
333 // the split child, because the split child owns the use interval covering it.
334 while (use_after != NULL && use_after->pos().Value() < position.Value()) {
335 use_before = use_after;
336 use_after = use_after->next();
337 }
338 } else {
339 while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
340 use_before = use_after;
341 use_after = use_after->next();
342 }
343 }
344
345 // Partition original use positions to the two live ranges.
346 if (use_before != NULL) {
347 use_before->next_ = NULL;
348 } else {
349 first_pos_ = NULL;
350 }
351 result->first_pos_ = use_after;
352
353 // Link the new live range in the chain before any of the other
354 // ranges linked from the range before the split.
355 result->parent_ = (parent_ == NULL) ? this : parent_;
356 result->next_ = next_;
357 next_ = result;
358
359#ifdef DEBUG
360 Verify();
361 result->Verify();
362#endif
363}
364
365
366// This implements an ordering on live ranges so that they are ordered by their
367// start positions. This is needed for the correctness of the register
368// allocation algorithm. If two live ranges start at the same offset then there
369// is a tie breaker based on where the value is first used. This part of the
370// ordering is merely a heuristic.
371bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
372 LifetimePosition start = Start();
373 LifetimePosition other_start = other->Start();
374 if (start.Value() == other_start.Value()) {
375 UsePosition* pos = FirstPosWithHint();
376 if (pos == NULL) return false;
377 UsePosition* other_pos = other->first_pos();
378 if (other_pos == NULL) return true;
379 return pos->pos().Value() < other_pos->pos().Value();
380 }
381 return start.Value() < other_start.Value();
382}
383
384
385void LiveRange::ShortenTo(LifetimePosition start) {
386 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
387 ASSERT(first_interval_ != NULL);
388 ASSERT(first_interval_->start().Value() <= start.Value());
389 ASSERT(start.Value() < first_interval_->end().Value());
390 first_interval_->set_start(start);
391}
392
393
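// Guarantees that this range covers [start, end[ by merging every existing
// interval that starts at or before |end| into a single leading interval.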
394void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
395 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
396 id_,
397 start.Value(),
398 end.Value());
399 LifetimePosition new_end = end;
400 while (first_interval_ != NULL &&
401 first_interval_->start().Value() <= end.Value()) {
402 if (first_interval_->end().Value() > end.Value()) {
403 new_end = first_interval_->end();
404 }
405 first_interval_ = first_interval_->next();
406 }
407
408 UseInterval* new_interval = new UseInterval(start, new_end);
409 new_interval->next_ = first_interval_;
410 first_interval_ = new_interval;
411 if (new_interval->next() == NULL) {
412 last_interval_ = new_interval;
413 }
414}
415
416
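// Adds the use interval [start, end[ at the front of the interval list,
// merging it with the current first interval when the two touch or overlap.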
417void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
418 LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
419 id_,
420 start.Value(),
421 end.Value());
422 if (first_interval_ == NULL) {
423 UseInterval* interval = new UseInterval(start, end);
424 first_interval_ = interval;
425 last_interval_ = interval;
426 } else {
427 if (end.Value() == first_interval_->start().Value()) {
428 first_interval_->set_start(start);
429 } else if (end.Value() < first_interval_->start().Value()) {
430 UseInterval* interval = new UseInterval(start, end);
431 interval->set_next(first_interval_);
432 first_interval_ = interval;
433 } else {
434 // The order of instruction processing (see ProcessInstructions) guarantees
435 // that each new use interval either precedes or intersects with the
436 // last added interval.
437 ASSERT(start.Value() < first_interval_->end().Value());
438 first_interval_->start_ = Min(start, first_interval_->start_);
439 first_interval_->end_ = Max(end, first_interval_->end_);
440 }
441 }
442}
443
444
445UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
446 LOperand* operand) {
447 LAllocator::TraceAlloc("Add to live range %d use position %d\n",
448 id_,
449 pos.Value());
450 UsePosition* use_pos = new UsePosition(pos, operand);
451 UsePosition* prev = NULL;
452 UsePosition* current = first_pos_;
453 while (current != NULL && current->pos().Value() < pos.Value()) {
454 prev = current;
455 current = current->next();
456 }
457
458 if (prev == NULL) {
459 use_pos->set_next(first_pos_);
460 first_pos_ = use_pos;
461 } else {
462 use_pos->next_ = prev->next_;
463 prev->next_ = use_pos;
464 }
465
466 return use_pos;
467}
468
469
470void LiveRange::ConvertOperands() {
471 LOperand* op = CreateAssignedOperand();
472 UsePosition* use_pos = first_pos();
473 while (use_pos != NULL) {
474 ASSERT(Start().Value() <= use_pos->pos().Value() &&
475 use_pos->pos().Value() <= End().Value());
476
477 if (use_pos->HasOperand()) {
478 ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
479 !use_pos->RequiresRegister());
480 use_pos->operand()->ConvertTo(op->kind(), op->index());
481 }
482 use_pos = use_pos->next();
483 }
484}
485
486
487UsePosition* LiveRange::AddUsePosition(LifetimePosition pos) {
488 return AddUsePosition(pos, CreateAssignedOperand());
489}
490
491
492bool LiveRange::CanCover(LifetimePosition position) const {
493 if (IsEmpty()) return false;
494 return Start().Value() <= position.Value() &&
495 position.Value() < End().Value();
496}
497
498
499bool LiveRange::Covers(LifetimePosition position) {
500 if (!CanCover(position)) return false;
501 UseInterval* start_search = FirstSearchIntervalForPosition(position);
502 for (UseInterval* interval = start_search;
503 interval != NULL;
504 interval = interval->next()) {
505 ASSERT(interval->next() == NULL ||
506 interval->next()->start().Value() >= interval->start().Value());
507 AdvanceLastProcessedMarker(interval, position);
508 if (interval->Contains(position)) return true;
509 if (interval->start().Value() > position.Value()) return false;
510 }
511 return false;
512}
513
514
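// Returns the first position at which this range and |other| intersect, or
// an invalid position if the two ranges never overlap.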
515LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
516 UseInterval* b = other->first_interval();
517 if (b == NULL) return LifetimePosition::Invalid();
518 LifetimePosition advance_last_processed_up_to = b->start();
519 UseInterval* a = FirstSearchIntervalForPosition(b->start());
520 while (a != NULL && b != NULL) {
521 if (a->start().Value() > other->End().Value()) break;
522 if (b->start().Value() > End().Value()) break;
523 LifetimePosition cur_intersection = a->Intersect(b);
524 if (cur_intersection.IsValid()) {
525 return cur_intersection;
526 }
527 if (a->start().Value() < b->start().Value()) {
528 a = a->next();
529 if (a == NULL || a->start().Value() > other->End().Value()) break;
530 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
531 } else {
532 b = b->next();
533 }
534 }
535 return LifetimePosition::Invalid();
536}
537
538
539void LAllocator::InitializeLivenessAnalysis() {
540 // Initialize the live_in sets for each block to NULL.
541 int block_count = graph()->blocks()->length();
542 live_in_sets_.Initialize(block_count);
543 live_in_sets_.AddBlock(NULL, block_count);
544}
545
546
547BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
548 // Compute live out for the given block, excluding backward successor
549 // edges.
550 BitVector* live_out = new BitVector(next_virtual_register_);
551
552 // Process all successor blocks.
553 HBasicBlock* successor = block->end()->FirstSuccessor();
554 while (successor != NULL) {
555 // Add values live on entry to the successor. Note the successor's
556 // live_in will not be computed yet for backwards edges.
557 BitVector* live_in = live_in_sets_[successor->block_id()];
558 if (live_in != NULL) live_out->Union(*live_in);
559
560 // All phi input operands corresponding to this successor edge are live
561 // out from this block.
562 int index = successor->PredecessorIndexOf(block);
563 const ZoneList<HPhi*>* phis = successor->phis();
564 for (int i = 0; i < phis->length(); ++i) {
565 HPhi* phi = phis->at(i);
566 if (!phi->OperandAt(index)->IsConstant()) {
567 live_out->Add(phi->OperandAt(index)->id());
568 }
569 }
570
571 // Check if we are done with second successor.
572 if (successor == block->end()->SecondSuccessor()) break;
573
574 successor = block->end()->SecondSuccessor();
575 }
576
577 return live_out;
578}
579
580
581void LAllocator::AddInitialIntervals(HBasicBlock* block,
582 BitVector* live_out) {
583 // Add an interval that includes the entire block to the live range for
584 // each live_out value.
585 LifetimePosition start = LifetimePosition::FromInstructionIndex(
586 block->first_instruction_index());
587 LifetimePosition end = LifetimePosition::FromInstructionIndex(
588 block->last_instruction_index()).NextInstruction();
589 BitVector::Iterator iterator(live_out);
590 while (!iterator.Done()) {
591 int operand_index = iterator.Current();
592 LiveRange* range = LiveRangeFor(operand_index);
593 range->AddUseInterval(start, end);
594 iterator.Advance();
595 }
596}
597
598
599int LAllocator::FixedDoubleLiveRangeID(int index) {
600 return -index - 1 - Register::kNumAllocatableRegisters;
601}
602
603
604LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
605 int pos,
606 bool is_tagged) {
607 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
608 ASSERT(operand->HasFixedPolicy());
609 if (operand->policy() == LUnallocated::FIXED_SLOT) {
610 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
611 } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
612 int reg_index = operand->fixed_index();
613 operand->ConvertTo(LOperand::REGISTER, reg_index);
614 } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
615 int reg_index = operand->fixed_index();
616 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
617 } else {
618 UNREACHABLE();
619 }
620 if (is_tagged) {
621 TraceAlloc("Fixed reg is tagged at %d\n", pos);
622 LInstruction* instr = chunk_->instructions()->at(pos);
623 if (instr->HasPointerMap()) {
624 instr->pointer_map()->RecordPointer(operand);
625 }
626 }
627 return operand;
628}
629
630
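// Returns the fixed live range for the given allocatable general register,
// creating it (with the register pre-assigned) on first use.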
631LiveRange* LAllocator::FixedLiveRangeFor(int index) {
632 if (index >= fixed_live_ranges_.length()) {
633 fixed_live_ranges_.AddBlock(NULL,
634 index - fixed_live_ranges_.length() + 1);
635 }
636
637 LiveRange* result = fixed_live_ranges_[index];
638 if (result == NULL) {
639 result = new LiveRange(FixedLiveRangeID(index));
640 ASSERT(result->IsFixed());
641 result->set_assigned_register(index, GENERAL_REGISTERS);
642 fixed_live_ranges_[index] = result;
643 }
644 return result;
645}
646
647
648LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
649 if (index >= fixed_double_live_ranges_.length()) {
650 fixed_double_live_ranges_.AddBlock(NULL,
651 index - fixed_double_live_ranges_.length() + 1);
652 }
653
654 LiveRange* result = fixed_double_live_ranges_[index];
655 if (result == NULL) {
656 result = new LiveRange(FixedDoubleLiveRangeID(index));
657 ASSERT(result->IsFixed());
658 result->set_assigned_register(index, DOUBLE_REGISTERS);
659 fixed_double_live_ranges_[index] = result;
660 }
661 return result;
662}
663
664LiveRange* LAllocator::LiveRangeFor(int index) {
665 if (index >= live_ranges_.length()) {
666 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
667 }
668 LiveRange* result = live_ranges_[index];
669 if (result == NULL) {
670 result = new LiveRange(index);
671 live_ranges_[index] = result;
672 }
673 return result;
674}
675
676
677LGap* LAllocator::GetLastGap(HBasicBlock* block) const {
678 int last_instruction = block->last_instruction_index();
679 int index = chunk_->NearestGapPos(last_instruction);
680 return chunk_->GetGapAt(index);
681}
682
683
684HPhi* LAllocator::LookupPhi(LOperand* operand) const {
685 if (!operand->IsUnallocated()) return NULL;
686 int index = operand->VirtualRegister();
687 HValue* instr = graph()->LookupValue(index);
688 if (instr != NULL && instr->IsPhi()) {
689 return HPhi::cast(instr);
690 }
691 return NULL;
692}
693
694
695LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
696 if (operand->IsUnallocated()) {
697 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
698 } else if (operand->IsRegister()) {
699 return FixedLiveRangeFor(operand->index());
700 } else if (operand->IsDoubleRegister()) {
701 return FixedDoubleLiveRangeFor(operand->index());
702 } else {
703 return NULL;
704 }
705}
706
707
708void LAllocator::Define(LifetimePosition position,
709 LOperand* operand,
710 LOperand* hint) {
711 LiveRange* range = LiveRangeFor(operand);
712 if (range == NULL) return;
713
714 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
715 // This can happen if there is a definition without a use.
716 range->AddUseInterval(position, position.NextInstruction());
717 range->AddUsePosition(position.NextInstruction(), NULL);
718 } else {
719 range->ShortenTo(position);
720 }
721
722 if (operand->IsUnallocated()) {
723 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
724 range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
725 }
726}
727
728
729void LAllocator::Use(LifetimePosition block_start,
730 LifetimePosition position,
731 LOperand* operand,
732 LOperand* hint) {
733 LiveRange* range = LiveRangeFor(operand);
734 if (range == NULL) return;
735 if (operand->IsUnallocated()) {
736 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
737 range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
738 }
739 range->AddUseInterval(block_start, position);
740}
741
742
743void LAllocator::AddConstraintsGapMove(int index,
744 LOperand* from,
745 LOperand* to) {
746 LGap* gap = chunk_->GetGapAt(index);
747 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
748 if (from->IsUnallocated()) {
749 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
750 for (int i = 0; i < move_operands->length(); ++i) {
751 LMoveOperands cur = move_operands->at(i);
752 LOperand* cur_to = cur.to();
753 if (cur_to->IsUnallocated()) {
754 if (cur_to->VirtualRegister() == from->VirtualRegister()) {
755 move->AddMove(cur.from(), to);
756 return;
757 }
758 }
759 }
760 }
761 move->AddMove(from, to);
762}
763
764
765void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
766 int start = block->first_instruction_index();
767 int end = block->last_instruction_index();
768 for (int i = start; i <= end; ++i) {
769 if (chunk_->IsGapAt(i)) {
770 InstructionSummary* summary = NULL;
771 InstructionSummary* prev_summary = NULL;
772 if (i < end) summary = GetSummary(i + 1);
773 if (i > start) prev_summary = GetSummary(i - 1);
774 MeetConstraintsBetween(prev_summary, summary, i);
775 }
776 }
777}
778
779
780void LAllocator::MeetConstraintsBetween(InstructionSummary* first,
781 InstructionSummary* second,
782 int gap_index) {
783 // Handle fixed temporaries.
784 if (first != NULL) {
785 for (int i = 0; i < first->TempCount(); ++i) {
786 LUnallocated* temp = LUnallocated::cast(first->TempAt(i));
787 if (temp->HasFixedPolicy()) {
788 AllocateFixed(temp, gap_index - 1, false);
789 }
790 }
791 }
792
793 // Handle fixed output operand.
794 if (first != NULL && first->Output() != NULL) {
795 LUnallocated* first_output = LUnallocated::cast(first->Output());
796 LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
797 bool assigned = false;
798 if (first_output->HasFixedPolicy()) {
799 LUnallocated* output_copy = first_output->CopyUnconstrained();
800 bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
801 AllocateFixed(first_output, gap_index, is_tagged);
802
803 // This value is produced on the stack; we never need to spill it.
804 if (first_output->IsStackSlot()) {
805 range->SetSpillOperand(first_output);
806 range->SetSpillStartIndex(gap_index - 1);
807 assigned = true;
808 }
809 chunk_->AddGapMove(gap_index, first_output, output_copy);
810 }
811
812 if (!assigned) {
813 range->SetSpillStartIndex(gap_index);
814
815 // This move to the spill operand is not a real use. Liveness analysis
816 // and splitting of live ranges do not account for it.
817 // Thus it should be inserted at a lifetime position corresponding to
818 // the instruction end.
819 LGap* gap = chunk_->GetGapAt(gap_index);
820 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
821 move->AddMove(first_output, range->GetSpillOperand());
822 }
823 }
824
825 // Handle fixed input operands of second instruction.
826 if (second != NULL) {
827 for (int i = 0; i < second->InputCount(); ++i) {
828 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i));
829 if (cur_input->HasFixedPolicy()) {
830 LUnallocated* input_copy = cur_input->CopyUnconstrained();
831 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
832 AllocateFixed(cur_input, gap_index + 1, is_tagged);
833 AddConstraintsGapMove(gap_index, input_copy, cur_input);
834 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
835 LUnallocated* input_copy = cur_input->CopyUnconstrained();
836 cur_input->set_virtual_register(next_virtual_register_++);
837 second->AddTemp(cur_input);
838 AddConstraintsGapMove(gap_index, input_copy, cur_input);
839 }
840 }
841 }
842
843 // Handle "output same as input" for second instruction.
844 if (second != NULL && second->Output() != NULL) {
845 LUnallocated* second_output = LUnallocated::cast(second->Output());
846 if (second_output->HasSameAsInputPolicy()) {
847 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0));
848 int output_vreg = second_output->VirtualRegister();
849 int input_vreg = cur_input->VirtualRegister();
850
851 LUnallocated* input_copy = cur_input->CopyUnconstrained();
852 cur_input->set_virtual_register(second_output->virtual_register());
853 AddConstraintsGapMove(gap_index, input_copy, cur_input);
854
855 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
856 int index = gap_index + 1;
857 LInstruction* instr = chunk_->instructions()->at(index);
858 if (instr->HasPointerMap()) {
859 instr->pointer_map()->RecordPointer(input_copy);
860 }
861 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
862 // The input is assumed to immediately have a tagged representation,
863 // before the pointer map can be used. I.e. the pointer map at the
864 // instruction will include the output operand (whose value at the
865 // beginning of the instruction is equal to the input operand). If
866 // this is not desired, then the pointer map at this instruction needs
867 // to be adjusted manually.
868 }
869 }
870 }
871}
872
873
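// Walks the instructions of the block in reverse order, extending and
// shortening live ranges according to the uses and definitions encountered.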
874void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
875 int block_start = block->first_instruction_index();
876 int index = block->last_instruction_index();
877
878 LifetimePosition block_start_position =
879 LifetimePosition::FromInstructionIndex(block_start);
880
881 while (index >= block_start) {
882 LifetimePosition curr_position =
883 LifetimePosition::FromInstructionIndex(index);
884
885 if (chunk_->IsGapAt(index)) {
886 // We have a gap at this position.
887 LGap* gap = chunk_->GetGapAt(index);
888 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
889 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
890 for (int i = 0; i < move_operands->length(); ++i) {
891 LMoveOperands* cur = &move_operands->at(i);
892 if (cur->IsIgnored()) continue;
893 LOperand* from = cur->from();
894 LOperand* to = cur->to();
895 HPhi* phi = LookupPhi(to);
896 LOperand* hint = to;
897 if (phi != NULL) {
898 // This is a phi resolving move.
899 if (!phi->block()->IsLoopHeader()) {
900 hint = LiveRangeFor(phi->id())->FirstHint();
901 }
902 } else {
903 if (to->IsUnallocated()) {
904 if (live->Contains(to->VirtualRegister())) {
905 Define(curr_position, to, from);
906 live->Remove(to->VirtualRegister());
907 } else {
908 cur->Eliminate();
909 continue;
910 }
911 } else {
912 Define(curr_position, to, from);
913 }
914 }
915 Use(block_start_position, curr_position, from, hint);
916 if (from->IsUnallocated()) {
917 live->Add(from->VirtualRegister());
918 }
919 }
920 } else {
921 ASSERT(!chunk_->IsGapAt(index));
922 InstructionSummary* summary = GetSummary(index);
923
924 if (summary != NULL) {
925 LOperand* output = summary->Output();
926 if (output != NULL) {
927 if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
928 Define(curr_position, output, NULL);
929 }
930
931 if (summary->IsCall()) {
932 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
933 if (output == NULL || !output->IsRegister() ||
934 output->index() != i) {
935 LiveRange* range = FixedLiveRangeFor(i);
936 range->AddUseInterval(curr_position,
937 curr_position.InstructionEnd());
938 }
939 }
940 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
941 if (output == NULL || !output->IsDoubleRegister() ||
942 output->index() != i) {
943 LiveRange* range = FixedDoubleLiveRangeFor(i);
944 range->AddUseInterval(curr_position,
945 curr_position.InstructionEnd());
946 }
947 }
948 }
949
950 for (int i = 0; i < summary->InputCount(); ++i) {
951 LOperand* input = summary->InputAt(i);
952
953 LifetimePosition use_pos;
954 if (input->IsUnallocated() &&
955 LUnallocated::cast(input)->IsUsedAtStart()) {
956 use_pos = curr_position;
957 } else {
958 use_pos = curr_position.InstructionEnd();
959 }
960
961 Use(block_start_position, use_pos, input, NULL);
962 if (input->IsUnallocated()) live->Add(input->VirtualRegister());
963 }
964
965 for (int i = 0; i < summary->TempCount(); ++i) {
966 LOperand* temp = summary->TempAt(i);
967 if (summary->IsCall()) {
968 if (temp->IsRegister()) continue;
969 if (temp->IsUnallocated()) {
970 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
971 if (temp_unalloc->HasFixedPolicy()) {
972 continue;
973 }
974 }
975 }
976 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
977 Define(curr_position, temp, NULL);
978 }
979 }
980 }
981
982 index = index - 1;
983 }
984}
985
986
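// For each phi in the block, adds gap moves from the phi's operands in the
// predecessor blocks and routes the phi's value through its spill slot.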
987void LAllocator::ResolvePhis(HBasicBlock* block) {
988 const ZoneList<HPhi*>* phis = block->phis();
989 for (int i = 0; i < phis->length(); ++i) {
990 HPhi* phi = phis->at(i);
991 LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
992 phi_operand->set_virtual_register(phi->id());
993 for (int j = 0; j < phi->OperandCount(); ++j) {
994 HValue* op = phi->OperandAt(j);
995 LOperand* operand = NULL;
996 if (op->IsConstant() && op->EmitAtUses()) {
997 HConstant* constant = HConstant::cast(op);
998 operand = chunk_->DefineConstantOperand(constant);
999 } else {
1000 ASSERT(!op->EmitAtUses());
1001 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
1002 unalloc->set_virtual_register(op->id());
1003 operand = unalloc;
1004 }
1005 HBasicBlock* cur_block = block->predecessors()->at(j);
1006 // The gap move must be added without the special processing done in
1007 // AddConstraintsGapMove.
1008 chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1009 operand,
1010 phi_operand);
1011 }
1012
1013 LiveRange* live_range = LiveRangeFor(phi->id());
1014 LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1015 label->GetOrCreateParallelMove(LGap::START)->
1016 AddMove(phi_operand, live_range->GetSpillOperand());
1017 live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1018 }
1019}
1020
1021
1022void LAllocator::Allocate(LChunk* chunk) {
1023 ASSERT(chunk_ == NULL);
1024 chunk_ = chunk;
1025 MeetRegisterConstraints();
1026 ResolvePhis();
1027 BuildLiveRanges();
1028 AllocateGeneralRegisters();
1029 AllocateDoubleRegisters();
1030 PopulatePointerMaps();
1031 if (has_osr_entry_) ProcessOsrEntry();
1032 ConnectRanges();
1033 ResolveControlFlow();
1034}
1035
1036
1037void LAllocator::MeetRegisterConstraints() {
1038 HPhase phase("Register constraints", chunk());
1039 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
1040 for (int i = 0; i < blocks->length(); ++i) {
1041 HBasicBlock* block = blocks->at(i);
1042 MeetRegisterConstraints(block);
1043 }
1044}
1045
1046
1047void LAllocator::ResolvePhis() {
1048 HPhase phase("Resolve phis", chunk());
1049
1050 // Process the blocks in reverse order.
1051 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
1052 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1053 HBasicBlock* block = blocks->at(block_id);
1054 ResolvePhis(block);
1055 }
1056}
1057
1058
1059void LAllocator::ResolveControlFlow(LiveRange* range,
1060 HBasicBlock* block,
1061 HBasicBlock* pred) {
1062 LifetimePosition pred_end =
1063 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()).
1064 PrevInstruction();
1065
1066 LifetimePosition cur_start =
1067 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1068 LiveRange* pred_cover = NULL;
1069 LiveRange* cur_cover = NULL;
1070 LiveRange* cur_range = range;
1071 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1072 if (cur_range->CanCover(cur_start)) {
1073 ASSERT(cur_cover == NULL);
1074 cur_cover = cur_range;
1075 }
1076 if (cur_range->CanCover(pred_end)) {
1077 ASSERT(pred_cover == NULL);
1078 pred_cover = cur_range;
1079 }
1080 cur_range = cur_range->next();
1081 }
1082
1083 if (cur_cover->IsSpilled()) return;
1084 ASSERT(pred_cover != NULL && cur_cover != NULL);
1085 if (pred_cover != cur_cover) {
1086 LOperand* pred_op = pred_cover->CreateAssignedOperand();
1087 LOperand* cur_op = cur_cover->CreateAssignedOperand();
1088 if (!pred_op->Equals(cur_op)) {
1089 LGap* gap = NULL;
1090 if (block->predecessors()->length() == 1) {
1091 gap = chunk_->GetGapAt(block->first_instruction_index());
1092 } else {
1093 ASSERT(pred->end()->SecondSuccessor() == NULL);
1094 gap = GetLastGap(pred);
1095 }
1096 gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
1097 }
1098 }
1099}
1100
1101
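// Returns the parallel move into which a connecting move at |pos| should be
// inserted, picking the appropriate slot of the nearest gap instruction.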
1102LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1103 int index = pos.InstructionIndex();
1104 if (chunk_->IsGapAt(index)) {
1105 LGap* gap = chunk_->GetGapAt(index);
1106 return gap->GetOrCreateParallelMove(
1107 pos.IsInstructionStart() ? LGap::START : LGap::END);
1108 }
1109 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1110 return chunk_->GetGapAt(gap_pos)->GetOrCreateParallelMove(
1111 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
1112}
1113
1114
1115HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1116 LGap* gap = chunk_->GetGapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1117 return gap->block();
1118}
1119
1120
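// Inserts gap moves to connect the pieces of a split live range when two
// adjacent pieces end up in different locations within the same block.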
1121void LAllocator::ConnectRanges() {
1122 HPhase phase("Connect ranges", this);
1123 for (int i = 0; i < live_ranges()->length(); ++i) {
1124 LiveRange* first_range = live_ranges()->at(i);
1125 if (first_range == NULL || first_range->parent() != NULL) continue;
1126
1127 LiveRange* second_range = first_range->next();
1128 while (second_range != NULL) {
1129 LifetimePosition pos = second_range->Start();
1130
1131 if (!second_range->IsSpilled()) {
1132 // Add gap move if the two live ranges touch and there is no block
1133 // boundary.
1134 if (first_range->End().Value() == pos.Value()) {
1135 bool should_insert = true;
1136 if (IsBlockBoundary(pos)) {
1137 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1138 }
1139 if (should_insert) {
1140 LParallelMove* move = GetConnectingParallelMove(pos);
1141 LOperand* prev_operand = first_range->CreateAssignedOperand();
1142 LOperand* cur_operand = second_range->CreateAssignedOperand();
1143 move->AddMove(prev_operand, cur_operand);
1144 }
1145 }
1146 }
1147
1148 first_range = second_range;
1149 second_range = second_range->next();
1150 }
1151 }
1152}
1153
1154
1155bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1156 if (block->predecessors()->length() != 1) return false;
1157 return block->predecessors()->first()->block_id() == block->block_id() - 1;
1158}
1159
1160
1161void LAllocator::ResolveControlFlow() {
1162 HPhase phase("Resolve control flow", this);
1163 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
1164 for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1165 HBasicBlock* block = blocks->at(block_id);
1166 if (CanEagerlyResolveControlFlow(block)) continue;
1167 BitVector* live = live_in_sets_[block->block_id()];
1168 BitVector::Iterator iterator(live);
1169 while (!iterator.Done()) {
1170 int operand_index = iterator.Current();
1171 for (int i = 0; i < block->predecessors()->length(); ++i) {
1172 HBasicBlock* cur = block->predecessors()->at(i);
1173 LiveRange* cur_range = LiveRangeFor(operand_index);
1174 ResolveControlFlow(cur_range, block, cur);
1175 }
1176 iterator.Advance();
1177 }
1178 }
1179}
1180
1181
1182void LAllocator::BuildLiveRanges() {
1183 HPhase phase("Build live ranges", this);
1184 InitializeLivenessAnalysis();
1185 // Process the blocks in reverse order.
1186 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
1187 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1188 HBasicBlock* block = blocks->at(block_id);
1189 BitVector* live = ComputeLiveOut(block);
1190 // Initially consider all live_out values live for the entire block. We
1191 // will shorten these intervals if necessary.
1192 AddInitialIntervals(block, live);
1193
1194 // Process the instructions in reverse order, generating and killing
1195 // live values.
1196 ProcessInstructions(block, live);
1197 // All phi output operands are killed by this block.
1198 const ZoneList<HPhi*>* phis = block->phis();
1199 for (int i = 0; i < phis->length(); ++i) {
1200 // The live range interval already ends at the first instruction of the
1201 // block.
1202 HPhi* phi = phis->at(i);
1203 live->Remove(phi->id());
1204
1205 LOperand* hint = NULL;
1206 LOperand* phi_operand = NULL;
1207 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1208 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
1209 for (int j = 0; j < move->move_operands()->length(); ++j) {
1210 LOperand* to = move->move_operands()->at(j).to();
1211 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
1212 hint = move->move_operands()->at(j).from();
1213 phi_operand = to;
1214 break;
1215 }
1216 }
1217 ASSERT(hint != NULL);
1218
1219 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1220 block->first_instruction_index());
1221 Define(block_start, phi_operand, hint);
1222 }
1223
1224 // Now live is the live_in set for this block, except that it does not
1225 // include values live out on backward successor edges.
1226 live_in_sets_[block_id] = live;
1227
1228 // If this block is a loop header, go back and patch up the necessary
1229 // predecessor blocks.
1230 if (block->IsLoopHeader()) {
1231 // TODO(kmillikin): Need to be able to get the last block of the loop
1232 // in the loop information. Add a live range stretching from the first
1233 // loop instruction to the last for each value live on entry to the
1234 // header.
1235 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1236 BitVector::Iterator iterator(live);
1237 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1238 block->first_instruction_index());
1239 LifetimePosition end = LifetimePosition::FromInstructionIndex(
1240 back_edge->last_instruction_index());
1241 while (!iterator.Done()) {
1242 int operand_index = iterator.Current();
1243 LiveRange* range = LiveRangeFor(operand_index);
1244 range->EnsureInterval(start, end);
1245 iterator.Advance();
1246 }
1247
1248 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1249 live_in_sets_[i]->Union(*live);
1250 }
1251 }
1252
1253#ifdef DEBUG
1254 if (block_id == 0) {
1255 BitVector::Iterator iterator(live);
1256 bool found = false;
1257 while (!iterator.Done()) {
1258 found = true;
1259 int operand_index = iterator.Current();
1260 PrintF("Function: %s\n",
1261 *graph()->info()->function()->debug_name()->ToCString());
1262 PrintF("Value %d used before first definition!\n", operand_index);
1263 LiveRange* range = LiveRangeFor(operand_index);
1264 PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1265 iterator.Advance();
1266 }
1267 ASSERT(!found);
1268 }
1269#endif
1270 }
1271}
1272
1273
1274bool LAllocator::SafePointsAreInOrder() const {
1275 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1276 int safe_point = 0;
1277 for (int i = 0; i < pointer_maps->length(); ++i) {
1278 LPointerMap* map = pointer_maps->at(i);
1279 if (safe_point > map->lithium_position()) return false;
1280 safe_point = map->lithium_position();
1281 }
1282 return true;
1283}
1284
1285
1286void LAllocator::PopulatePointerMaps() {
1287 HPhase phase("Populate pointer maps", this);
1288 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1289
1290 ASSERT(SafePointsAreInOrder());
1291
1292 // Iterate over all safe point positions and record a pointer
1293 // for all spilled live ranges at this point.
1294 int first_safe_point_index = 0;
1295 int last_range_start = 0;
1296 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1297 LiveRange* range = live_ranges()->at(range_idx);
1298 if (range == NULL) continue;
1299 // Iterate over the first parts of multi-part live ranges.
1300 if (range->parent() != NULL) continue;
1301 // Skip non-pointer values.
1302 if (!HasTaggedValue(range->id())) continue;
1303 // Skip empty live ranges.
1304 if (range->IsEmpty()) continue;
1305
1306 // Find the extent of the range and its children.
1307 int start = range->Start().InstructionIndex();
1308 int end = 0;
1309 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1310 LifetimePosition this_end = cur->End();
1311 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1312 ASSERT(cur->Start().InstructionIndex() >= start);
1313 }
1314
1315 // Most of the ranges are in order, but not all. Keep an eye on when
1316 // they step backwards and reset the first_safe_point_index so we don't
1317 // miss any safe points.
1318 if (start < last_range_start) {
1319 first_safe_point_index = 0;
1320 }
1321 last_range_start = start;
1322
1323 // Step across all the safe points that are before the start of this range,
1324 // recording how far we step in order to save doing this for the next range.
1325 while (first_safe_point_index < pointer_maps->length()) {
1326 LPointerMap* map = pointer_maps->at(first_safe_point_index);
1327 int safe_point = map->lithium_position();
1328 if (safe_point >= start) break;
1329 first_safe_point_index++;
1330 }
1331
1332 // Step through the safe points to see whether they are in the range.
1333 for (int safe_point_index = first_safe_point_index;
1334 safe_point_index < pointer_maps->length();
1335 ++safe_point_index) {
1336 LPointerMap* map = pointer_maps->at(safe_point_index);
1337 int safe_point = map->lithium_position();
1338
1339 // The safe points are sorted so we can stop searching here.
1340 if (safe_point - 1 > end) break;
1341
1342 // Advance to the next active range that covers the current
1343 // safe point position.
1344 LifetimePosition safe_point_pos =
1345 LifetimePosition::FromInstructionIndex(safe_point);
1346 LiveRange* cur = range;
1347 while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
1348 cur = cur->next();
1349 }
1350 if (cur == NULL) continue;
1351
1352 // Check if the live range is spilled and the safe point is after
1353 // the spill position.
1354 if (range->HasAllocatedSpillOperand() &&
1355 safe_point >= range->spill_start_index()) {
1356 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1357 range->id(), range->spill_start_index(), safe_point);
1358 map->RecordPointer(range->GetSpillOperand());
1359 }
1360
1361 if (!cur->IsSpilled()) {
1362 TraceAlloc("Pointer in register for range %d (start at %d) "
1363 "at safe point %d\n",
1364 cur->id(), cur->Start().Value(), safe_point);
1365 LOperand* operand = cur->CreateAssignedOperand();
1366 ASSERT(!operand->IsStackSlot());
1367 map->RecordPointer(operand);
1368 }
1369 }
1370 }
1371}
1372
1373
1374void LAllocator::ProcessOsrEntry() {
1375 const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1376
1377 // Linear search for the OSR entry instruction in the chunk.
1378 int index = -1;
1379 while (++index < instrs->length() &&
1380 !instrs->at(index)->IsOsrEntry()) {
1381 }
1382 ASSERT(index < instrs->length());
1383 LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1384
1385 LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1386 for (int i = 0; i < live_ranges()->length(); ++i) {
1387 LiveRange* range = live_ranges()->at(i);
1388 if (range != NULL) {
1389 if (range->Covers(position) &&
1390 range->HasRegisterAssigned() &&
1391 range->TopLevel()->HasAllocatedSpillOperand()) {
1392 int reg_index = range->assigned_register();
1393 LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1394 if (range->IsDouble()) {
1395 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1396 } else {
1397 instruction->MarkSpilledRegister(reg_index, spill_operand);
1398 }
1399 }
1400 }
1401 }
1402}
1403
1404
1405void LAllocator::AllocateGeneralRegisters() {
1406 HPhase phase("Allocate general registers", this);
1407 num_registers_ = Register::kNumAllocatableRegisters;
1408 mode_ = GENERAL_REGISTERS;
1409 AllocateRegisters();
1410}
1411
1412
1413void LAllocator::AllocateDoubleRegisters() {
1414 HPhase phase("Allocate double registers", this);
1415 num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1416 mode_ = DOUBLE_REGISTERS;
1417 AllocateRegisters();
1418}
1419
1420
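// Main linear-scan loop: processes unhandled live ranges in order of start
// position, moving ranges between the active, inactive and handled sets.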
1421void LAllocator::AllocateRegisters() {
1422 ASSERT(mode_ != NONE);
1423 reusable_slots_.Clear();
1424
1425 for (int i = 0; i < live_ranges_.length(); ++i) {
1426 if (live_ranges_[i] != NULL) {
1427 if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
1428 AddToUnhandledUnsorted(live_ranges_[i]);
1429 }
1430 }
1431 }
1432 SortUnhandled();
1433 ASSERT(UnhandledIsSorted());
1434
1435 ASSERT(active_live_ranges_.is_empty());
1436 ASSERT(inactive_live_ranges_.is_empty());
1437
1438 if (mode_ == DOUBLE_REGISTERS) {
1439 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1440 LiveRange* current = fixed_double_live_ranges_.at(i);
1441 if (current != NULL) {
1442 AddToInactive(current);
1443 }
1444 }
1445 } else {
1446 for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1447 LiveRange* current = fixed_live_ranges_.at(i);
1448 if (current != NULL) {
1449 AddToInactive(current);
1450 }
1451 }
1452 }
1453
1454 while (!unhandled_live_ranges_.is_empty()) {
1455 ASSERT(UnhandledIsSorted());
1456 LiveRange* current = unhandled_live_ranges_.RemoveLast();
1457 ASSERT(UnhandledIsSorted());
1458 LifetimePosition position = current->Start();
1459 TraceAlloc("Processing interval %d start=%d\n",
1460 current->id(),
1461 position.Value());
1462
1463 if (current->HasAllocatedSpillOperand()) {
1464 TraceAlloc("Live range %d already has a spill operand\n", current->id());
1465 LifetimePosition next_pos = position;
1466 if (chunk_->IsGapAt(next_pos.InstructionIndex())) {
1467 next_pos = next_pos.NextInstruction();
1468 }
1469 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1470 // If the range already has a spill operand and it doesn't need a
1471 // register immediately, split it and spill the first part of the range.
1472 if (pos == NULL) {
1473 Spill(current);
1474 continue;
1475 } else if (pos->pos().Value() >
1476 current->Start().NextInstruction().Value()) {
1477 // Do not spill the live range eagerly if the use position that can
1478 // benefit from the register is too close to the start of the live range.
1479 SpillBetween(current, current->Start(), pos->pos());
1480 ASSERT(UnhandledIsSorted());
1481 continue;
1482 }
1483 }
1484
1485 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1486 LiveRange* cur_active = active_live_ranges_.at(i);
1487 if (cur_active->End().Value() <= position.Value()) {
1488 ActiveToHandled(cur_active);
1489 --i; // The live range was removed from the list of active live ranges.
1490 } else if (!cur_active->Covers(position)) {
1491 ActiveToInactive(cur_active);
1492 --i; // The live range was removed from the list of active live ranges.
1493 }
1494 }
1495
1496 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1497 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1498 if (cur_inactive->End().Value() <= position.Value()) {
1499 InactiveToHandled(cur_inactive);
1500 --i; // Live range was removed from the list of inactive live ranges.
1501 } else if (cur_inactive->Covers(position)) {
1502 InactiveToActive(cur_inactive);
1503 --i; // Live range was removed from the list of inactive live ranges.
1504 }
1505 }
1506
1507 ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
1508
1509 bool result = TryAllocateFreeReg(current);
1510 if (!result) {
1511 AllocateBlockedReg(current);
1512 }
1513
1514 if (current->HasRegisterAssigned()) {
1515 AddToActive(current);
1516 }
1517 }
1518
1519 active_live_ranges_.Clear();
1520 inactive_live_ranges_.Clear();
1521}
1522
1523
1524void LAllocator::Setup() {
1525 LConstantOperand::SetupCache();
1526 LStackSlot::SetupCache();
1527 LDoubleStackSlot::SetupCache();
1528 LRegister::SetupCache();
1529 LDoubleRegister::SetupCache();
1530}
1531
1532
1533const char* LAllocator::RegisterName(int allocation_index) {
1534 ASSERT(mode_ != NONE);
1535 if (mode_ == GENERAL_REGISTERS) {
1536 return Register::AllocationIndexToString(allocation_index);
1537 } else {
1538 return DoubleRegister::AllocationIndexToString(allocation_index);
1539 }
1540}
1541
1542
1543void LAllocator::TraceAlloc(const char* msg, ...) {
1544 if (FLAG_trace_alloc) {
1545 va_list arguments;
1546 va_start(arguments, msg);
1547 OS::VPrint(msg, arguments);
1548 va_end(arguments);
1549 }
1550}
1551
1552
1553void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
1554 operand->set_virtual_register(value->id());
1555 current_summary()->AddInput(operand);
1556}
1557
1558
1559bool LAllocator::HasTaggedValue(int virtual_register) const {
1560 HValue* value = graph()->LookupValue(virtual_register);
1561 if (value == NULL) return false;
1562 return value->representation().IsTagged();
1563}
1564
1565
1566RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1567 HValue* value = graph()->LookupValue(virtual_register);
1568 if (value != NULL && value->representation().IsDouble()) {
1569 return DOUBLE_REGISTERS;
1570 }
1571 return GENERAL_REGISTERS;
1572}
1573
1574
1575void LAllocator::MarkAsCall() {
1576 current_summary()->MarkAsCall();
1577}
1578
1579
1580void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
1581 operand->set_virtual_register(instr->id());
1582 current_summary()->SetOutput(operand);
1583}
1584
1585
1586void LAllocator::RecordTemporary(LUnallocated* operand) {
1587 ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
1588 if (!operand->HasFixedPolicy()) {
1589 operand->set_virtual_register(next_virtual_register_++);
1590 }
1591 current_summary()->AddTemp(operand);
1592}
1593
1594
1595int LAllocator::max_initial_value_ids() {
1596 return LUnallocated::kMaxVirtualRegisters / 32;
1597}
1598
1599
1600void LAllocator::BeginInstruction() {
1601 if (next_summary_ == NULL) {
1602 next_summary_ = new InstructionSummary();
1603 }
1604 summary_stack_.Add(next_summary_);
1605 next_summary_ = NULL;
1606}
1607
1608
1609void LAllocator::SummarizeInstruction(int index) {
1610 InstructionSummary* sum = summary_stack_.RemoveLast();
1611 if (summaries_.length() <= index) {
1612 summaries_.AddBlock(NULL, index + 1 - summaries_.length());
1613 }
1614 ASSERT(summaries_[index] == NULL);
1615 if (sum->Output() != NULL || sum->InputCount() > 0 || sum->TempCount() > 0) {
1616 summaries_[index] = sum;
1617 } else {
1618 next_summary_ = sum;
1619 }
1620}
1621
1622
1623void LAllocator::OmitInstruction() {
1624 summary_stack_.RemoveLast();
1625}
1626
1627
1628void LAllocator::AddToActive(LiveRange* range) {
1629 TraceAlloc("Add live range %d to active\n", range->id());
1630 active_live_ranges_.Add(range);
1631}
1632
1633
1634void LAllocator::AddToInactive(LiveRange* range) {
1635 TraceAlloc("Add live range %d to inactive\n", range->id());
1636 inactive_live_ranges_.Add(range);
1637}
1638
1639
1640void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1641 if (range == NULL || range->IsEmpty()) return;
1642 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1643 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1644 LiveRange* cur_range = unhandled_live_ranges_.at(i);
1645 if (range->ShouldBeAllocatedBefore(cur_range)) {
1646 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1647 unhandled_live_ranges_.InsertAt(i + 1, range);
1648 ASSERT(UnhandledIsSorted());
1649 return;
1650 }
1651 }
1652 TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1653 unhandled_live_ranges_.InsertAt(0, range);
1654 ASSERT(UnhandledIsSorted());
1655}
1656
1657
1658void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1659 if (range == NULL || range->IsEmpty()) return;
1660 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1661 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1662 unhandled_live_ranges_.Add(range);
1663}
1664
1665
static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range);
  }
}


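// Returns a previously freed spill slot that can be reused for |range|, or
// NULL if none is available. A slot is reusable only if the range that last
// occupied it ends before |range| starts; only the first candidate in
// reusable_slots_ is considered.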
LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
              Register::kNumAllocatableRegisters);


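// Tries to assign a register that is free for the whole of |current|. For
// every allocatable register we track the position up to which it stays
// free: 0 for registers held by an active range, the first intersection for
// registers held by an inactive range, and "forever" otherwise. A hinted
// register wins if it stays free until the end of |current|; otherwise the
// register that stays free the longest is picked, splitting |current| at the
// point where that register becomes blocked if necessary.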
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  UsePosition* hinted_use = current->FirstPosWithHint();
  if (hinted_use != NULL) {
    LOperand* hint = hinted_use->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc(
          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
          RegisterName(register_index),
          free_until_pos[register_index].Value(),
          current->id(),
          current->End().Value());

      // The desired register is free until the end of the current live range.
      if (free_until_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %s to live range %d\n",
                   RegisterName(register_index),
                   current->id());
        current->set_assigned_register(register_index, mode_);
        return true;
      }
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitAt(current, pos);
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  return true;
}


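// Called when no register is free for the whole of |current|. Picks the
// register whose next use by another live range is furthest away. If even
// that use comes before the first position where |current| itself needs a
// register, |current| is spilled up to that use instead. Otherwise the
// register is taken (splitting |current| where a fixed range blocks it) and
// the clashing active and inactive ranges are evicted by
// SplitAndSpillIntersecting.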
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    //
    // Corner case: the first use position is equal to the start of the range.
    // In that case we have nothing to spill and SpillBetween would just return
    // this range to the list of unhandled ones, leading to an infinite loop.
    ASSERT(current->Start().Value() < register_use->pos().Value());
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live regions that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}


void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        SpillAfter(range, split_pos);
      } else {
        SpillBetween(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
}


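// Records a parallel move from |prev|'s operand to |next|'s operand in the
// START position of the gap at instruction index |pos|, and hints |next|
// towards |prev|'s operand so that both live ranges preferably end up in the
// same location.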
void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
  UsePosition* prev_pos = prev->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  UsePosition* next_pos = next->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  LOperand* prev_operand = prev_pos->operand();
  LOperand* next_operand = next_pos->operand();
  LGap* gap = chunk_->GetGapAt(pos);
  gap->GetOrCreateParallelMove(LGap::START)->
      AddMove(prev_operand, next_operand);
  next_pos->set_hint(prev_operand);
}


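// Splits |range| at |pos| and returns the tail as a new live range with a
// fresh virtual register. If |pos| is at or before the start of |range|,
// nothing is split and |range| itself is returned.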
LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  LiveRange* result = LiveRangeFor(next_virtual_register_++);
  range->SplitAt(pos, result);
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitAt(range, split_pos);
}


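// Picks a split position inside [start, end]. Within a single block the
// latest possible position is used. If the interval spans several blocks,
// we walk from the block containing |end| up through enclosing loop headers
// that still lie after the block containing |start| and split at the first
// instruction of the outermost such header, keeping spill code out of loops
// where possible.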
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  if (block == end_block) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitAt(range, pos);
  Spill(second_part);
}


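// Spills the portion of |range| that lies between |start| and |end|. The
// range is first split at |start|; if the tail still reaches into
// [start, end[ it is split once more, the middle piece is spilled, and the
// remainder goes back on the unhandled list for later allocation.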
void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  ASSERT(start.Value() < end.Value());
  LiveRange* second_part = SplitAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part
    // and put the rest on the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        second_part->Start().InstructionEnd(),
        end.PrevInstruction().InstructionEnd());

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it on the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}


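// Marks |range| as spilled. The spill slot lives on the top-level range: it
// is allocated on first use, preferring a slot released by an already
// finished range over a fresh slot from the chunk.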
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled();
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


} }  // namespace v8::internal