// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "lithium-allocator.h"

#include "hydrogen.h"
#include "string-stream.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {


#define DEFINE_OPERAND_CACHE(name, type)            \
  name name::cache[name::kNumCachedOperands];       \
  void name::SetupCache() {                         \
    for (int i = 0; i < kNumCachedOperands; i++) {  \
      cache[i].ConvertTo(type, i);                  \
    }                                               \
  }

DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
DEFINE_OPERAND_CACHE(LRegister, REGISTER)
DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)

#undef DEFINE_OPERAND_CACHE


static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      switch (unalloc->policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          const char* register_name =
              Register::AllocationIndexToString(unalloc->fixed_index());
          stream->Add("(=%s)", register_name);
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          const char* double_register_name =
              DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
          stream->Add("(=%s)", double_register_name);
          break;
        }
        case LUnallocated::FIXED_SLOT:
          stream->Add("(=%dS)", unalloc->fixed_index());
          break;
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
        case LUnallocated::IGNORE:
          stream->Add("(0)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER:
      stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
      break;
    case DOUBLE_REGISTER:
      stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
      break;
    case ARGUMENT:
      stream->Add("[arg:%d]", index());
      break;
  }
}

int LOperand::VirtualRegister() {
  LUnallocated* unalloc = LUnallocated::cast(this);
  return unalloc->virtual_register();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}

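// Split this interval at |pos|: this interval keeps [start, pos[ and a newly
// allocated interval covering [pos, end[ is linked in right after it.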
void UseInterval::SplitAt(LifetimePosition pos) {
  ASSERT(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif

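// Find the first use position at or after |start|. The search resumes from
// the last processed use so repeated queries do not rescan the whole list.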
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // TODO(kmillikin): Comment. Now.
  if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;

  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return use_pos->pos().Value() > pos.NextInstruction().Value();
}


UsePosition* LiveRange::FirstPosWithHint() const {
  UsePosition* pos = first_pos_;
  while (pos != NULL && !pos->HasHint()) pos = pos->next();
  return pos;
}

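// Build an operand describing the current allocation state of this range:
// the assigned register, the spill operand if the range is spilled, or an
// unallocated operand carrying the virtual register id otherwise.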
LOperand* LiveRange::CreateAssignedOperand() {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    if (IsDouble()) {
      op = LDoubleRegister::Create(assigned_register());
    } else {
      op = LRegister::Create(assigned_register());
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}

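// Pick the interval from which a search for |position| should start: the
// cached current interval if it does not lie past |position|, otherwise the
// first interval of the range.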
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child because the split child owns the use interval
    // covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = FirstPosWithHint();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}

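// Make sure the range covers [start, end[: leading intervals that begin at
// or before |end| are absorbed into a single interval prepended to the chain.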
void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}


void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}


UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
                                       LOperand* operand) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new UsePosition(pos, operand);
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  return use_pos;
}


void LiveRange::ConvertOperands() {
  LOperand* op = CreateAssignedOperand();
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


UsePosition* LiveRange::AddUsePosition(LifetimePosition pos) {
  return AddUsePosition(pos, CreateAssignedOperand());
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph()->blocks()->length();
  live_in_sets_.Initialize(block_count);
  live_in_sets_.AddBlock(NULL, block_count);
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute the live-out set for the given block, not including values that
  // are live only along backward successor edges.
  BitVector* live_out = new BitVector(next_virtual_register_);

  // Process all successor blocks.
  HBasicBlock* successor = block->end()->FirstSuccessor();
  while (successor != NULL) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }

    // Check if we are done with second successor.
    if (successor == block->end()->SecondSuccessor()) break;

    successor = block->end()->SecondSuccessor();
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end);
    iterator.Advance();
  }
}

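// Fixed double register ranges get negative ids placed below the ids used
// for fixed general register ranges.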
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumAllocatableRegisters;
}


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->policy() == LUnallocated::FIXED_SLOT) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
  } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = chunk_->instructions()->at(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand);
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  if (index >= fixed_live_ranges_.length()) {
    fixed_live_ranges_.AddBlock(NULL,
                                index - fixed_live_ranges_.length() + 1);
  }

  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(FixedLiveRangeID(index));
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, GENERAL_REGISTERS);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  if (index >= fixed_double_live_ranges_.length()) {
    fixed_double_live_ranges_.AddBlock(NULL,
        index - fixed_double_live_ranges_.length() + 1);
  }

  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(FixedDoubleLiveRangeID(index));
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, DOUBLE_REGISTERS);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}

LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(index);
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) const {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return chunk_->GetGapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = operand->VirtualRegister();
  HValue* instr = graph()->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction());
    range->AddUsePosition(position.NextInstruction(), NULL);
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
  }
}

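// Record a use of |operand| at |position| and extend its live range back to
// the start of the block.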
void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
  }
  range->AddUseInterval(block_start, position);
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = chunk_->GetGapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.to();
      if (cur_to->IsUnallocated()) {
        if (cur_to->VirtualRegister() == from->VirtualRegister()) {
          move->AddMove(cur.from(), to);
          return;
        }
      }
    }
  }
  move->AddMove(from, to);
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  for (int i = start; i <= end; ++i) {
    if (chunk_->IsGapAt(i)) {
      InstructionSummary* summary = NULL;
      InstructionSummary* prev_summary = NULL;
      if (i < end) summary = GetSummary(i + 1);
      if (i > start) prev_summary = GetSummary(i - 1);
      MeetConstraintsBetween(prev_summary, summary, i);
    }
  }
}

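// Satisfy the operand constraints (fixed temporaries, fixed outputs, fixed
// inputs and same-as-input outputs) of the instructions on either side of
// the gap at |gap_index| by inserting the necessary gap moves.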
void LAllocator::MeetConstraintsBetween(InstructionSummary* first,
                                        InstructionSummary* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (int i = 0; i < first->TempCount(); ++i) {
      LUnallocated* temp = LUnallocated::cast(first->TempAt(i));
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained();
      bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = chunk_->GetGapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
      move->AddMove(first_output, range->GetSpillOperand());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (int i = 0; i < second->InputCount(); ++i) {
      LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i));
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained();
        bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained();
        cur_input->set_virtual_register(next_virtual_register_++);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_);
        }

        second->AddTemp(cur_input);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0));
      int output_vreg = second_output->VirtualRegister();
      int input_vreg = cur_input->VirtualRegister();

      LUnallocated* input_copy = cur_input->CopyUnconstrained();
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = chunk_->instructions()->at(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy);
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}

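// Walk the block's instructions in reverse order, adding use intervals and
// use positions to the affected live ranges and updating the set of values
// live at the current position.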
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (chunk_->IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = chunk_->GetGapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->from();
        LOperand* to = cur->to();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->FirstHint();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(to->VirtualRegister())) {
              Define(curr_position, to, from);
              live->Remove(to->VirtualRegister());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(from->VirtualRegister());
        }
      }
    } else {
      ASSERT(!chunk_->IsGapAt(index));
      InstructionSummary* summary = GetSummary(index);

      if (summary != NULL) {
        LOperand* output = summary->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
          Define(curr_position, output, NULL);
        }

        if (summary->IsCall()) {
          for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd());
            }
          }
          for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd());
            }
          }
        }

        for (int i = 0; i < summary->InputCount(); ++i) {
          LOperand* input = summary->InputAt(i);

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) live->Add(input->VirtualRegister());
        }

        for (int i = 0; i < summary->TempCount(); ++i) {
          LOperand* temp = summary->TempAt(i);
          if (summary->IsCall()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START)->
        AddMove(phi_operand, live_range->GetSpillOperand());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


void LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);
  chunk_ = chunk;
  MeetRegisterConstraints();
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  AllocateDoubleRegisters();
  PopulatePointerMaps();
  if (has_osr_entry_) ProcessOsrEntry();
  ConnectRanges();
  ResolveControlFlow();
}


void LAllocator::MeetRegisterConstraints() {
  HPhase phase("Register constraints", chunk());
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
  }
}


void LAllocator::ResolvePhis() {
  HPhase phase("Resolve phis", chunk());

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}

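// Insert a move on the edge from |pred| to |block| if the value covered by
// |range| lives in different locations at the end of the predecessor and at
// the start of the block.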
void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index()).
          PrevInstruction();

  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      ASSERT(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      ASSERT(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  ASSERT(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand();
    LOperand* cur_op = cur_cover->CreateAssignedOperand();
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = chunk_->GetGapAt(block->first_instruction_index());
      } else {
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);
      }
      gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
    }
  }
}

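// Return the parallel move into which a connecting move at |pos| should be
// inserted, choosing the gap and slot (START, END, BEFORE or AFTER) that
// corresponds to the position.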
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (chunk_->IsGapAt(index)) {
    LGap* gap = chunk_->GetGapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END);
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return chunk_->GetGapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
}


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = chunk_->GetGapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  HPhase phase("Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand();
            LOperand* cur_operand = second_range->CreateAssignedOperand();
            move->AddMove(prev_operand, cur_operand);
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  HPhase phase("Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  HPhase phase("Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).to();
        if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
          hint = move->move_operands()->at(j).from();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block, not including values live out on
    // backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index());
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end);
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        PrintF("Function: %s\n",
               *graph()->info()->function()->debug_name()->ToCString());
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }
}


bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}


void LAllocator::PopulatePointerMaps() {
  HPhase phase("Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand();
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand);
      }
    }
  }
}


void LAllocator::ProcessOsrEntry() {
  const ZoneList<LInstruction*>* instrs = chunk_->instructions();

  // Linear search for the OSR entry instruction in the chunk.
  int index = -1;
  while (++index < instrs->length() &&
         !instrs->at(index)->IsOsrEntry()) {
  }
  ASSERT(index < instrs->length());
  LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));

  LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* range = live_ranges()->at(i);
    if (range != NULL) {
      if (range->Covers(position) &&
          range->HasRegisterAssigned() &&
          range->TopLevel()->HasAllocatedSpillOperand()) {
        int reg_index = range->assigned_register();
        LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
        if (range->IsDouble()) {
          instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
        } else {
          instruction->MarkSpilledRegister(reg_index, spill_operand);
        }
      }
    }
  }
}


void LAllocator::AllocateGeneralRegisters() {
  HPhase phase("Allocate general registers", this);
  num_registers_ = Register::kNumAllocatableRegisters;
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  HPhase phase("Allocate double registers", this);
  num_registers_ = DoubleRegister::kNumAllocatableRegisters;
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}

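// Main linear scan loop: repeatedly take the unhandled range with the lowest
// start position, retire or deactivate ranges that no longer cover that
// position, then try to find a free register and fall back to splitting and
// spilling if none is available.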
void LAllocator::AllocateRegisters() {
  ASSERT(mode_ != NONE);
  reusable_slots_.Clear();

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (chunk_->IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!result) {
      AllocateBlockedReg(current);
    }

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  active_live_ranges_.Clear();
  inactive_live_ranges_.Clear();
}


void LAllocator::Setup() {
  LConstantOperand::SetupCache();
  LStackSlot::SetupCache();
  LDoubleStackSlot::SetupCache();
  LRegister::SetupCache();
  LDoubleRegister::SetupCache();
}


const char* LAllocator::RegisterName(int allocation_index) {
  ASSERT(mode_ != NONE);
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
  operand->set_virtual_register(value->id());
  current_summary()->AddInput(operand);
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph()->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged();
}


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph()->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::MarkAsCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs can either use a fixed register or have a short lifetime (be
  // used at the start of the instruction).
1591 InstructionSummary* summary = current_summary();
1592#ifdef DEBUG
1593 ASSERT(summary->Output() == NULL ||
1594 LUnallocated::cast(summary->Output())->HasFixedPolicy() ||
1595 !LUnallocated::cast(summary->Output())->HasRegisterPolicy());
1596 for (int i = 0; i < summary->InputCount(); i++) {
1597 ASSERT(LUnallocated::cast(summary->InputAt(i))->HasFixedPolicy() ||
1598 LUnallocated::cast(summary->InputAt(i))->IsUsedAtStart() ||
1599 !LUnallocated::cast(summary->InputAt(i))->HasRegisterPolicy());
1600 }
1601 for (int i = 0; i < summary->TempCount(); i++) {
1602 ASSERT(LUnallocated::cast(summary->TempAt(i))->HasFixedPolicy() ||
1603 !LUnallocated::cast(summary->TempAt(i))->HasRegisterPolicy());
1604 }
1605#endif
1606 summary->MarkAsCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001607}
1608
1609
1610void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
1611 operand->set_virtual_register(instr->id());
1612 current_summary()->SetOutput(operand);
1613}
1614
1615
1616void LAllocator::RecordTemporary(LUnallocated* operand) {
1617 ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
1618 if (!operand->HasFixedPolicy()) {
1619 operand->set_virtual_register(next_virtual_register_++);
1620 }
1621 current_summary()->AddTemp(operand);
1622}
1623
1624
1625int LAllocator::max_initial_value_ids() {
1626 return LUnallocated::kMaxVirtualRegisters / 32;
1627}
1628
1629
1630void LAllocator::BeginInstruction() {
1631 if (next_summary_ == NULL) {
1632 next_summary_ = new InstructionSummary();
1633 }
1634 summary_stack_.Add(next_summary_);
1635 next_summary_ = NULL;
1636}
1637
1638
1639void LAllocator::SummarizeInstruction(int index) {
1640 InstructionSummary* sum = summary_stack_.RemoveLast();
1641 if (summaries_.length() <= index) {
1642 summaries_.AddBlock(NULL, index + 1 - summaries_.length());
1643 }
1644 ASSERT(summaries_[index] == NULL);
1645 if (sum->Output() != NULL || sum->InputCount() > 0 || sum->TempCount() > 0) {
1646 summaries_[index] = sum;
1647 } else {
1648 next_summary_ = sum;
1649 }
1650}
1651
1652
1653void LAllocator::OmitInstruction() {
1654 summary_stack_.RemoveLast();
1655}
1656
1657
1658void LAllocator::AddToActive(LiveRange* range) {
1659 TraceAlloc("Add live range %d to active\n", range->id());
1660 active_live_ranges_.Add(range);
1661}
1662
1663
1664void LAllocator::AddToInactive(LiveRange* range) {
1665 TraceAlloc("Add live range %d to inactive\n", range->id());
1666 inactive_live_ranges_.Add(range);
1667}
1668
1669
void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range);
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range);
  ASSERT(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range);
}


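// Comparator used to sort the unhandled list: a range that should be
// allocated before another compares as greater, so it ends up nearer the end
// of the list; ties are broken by range id to keep the order deterministic.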
static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


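// Called when a live range is retired. If this is the last part of the range
// and its top level owns a real spill slot, the slot is remembered so that
// later ranges can reuse it.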
void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range);
  }
}


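// Returns a previously freed spill slot whose owning range ends before the
// given range starts, or NULL if no such slot is available.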
LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
              Register::kNumAllocatableRegisters);


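// Tries to assign a register that is free for the whole of the current live
// range, or at least until a position where the range can be split. For each
// register the position up to which it stays free is computed from the
// active and inactive ranges; a hinted register is preferred when it is free
// until the end of the range, otherwise the register that stays free the
// longest is chosen. Returns false if every register is blocked at the start
// of the range.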
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  UsePosition* hinted_use = current->FirstPosWithHint();
  if (hinted_use != NULL) {
    LOperand* hint = hinted_use->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc(
          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
          RegisterName(register_index),
          free_until_pos[register_index].Value(),
          current->id(),
          current->End().Value());

      // The desired register is free until the end of the current live range.
      if (free_until_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %s to live range %d\n",
                   RegisterName(register_index),
                   current->id());
        current->set_assigned_register(register_index, mode_);
        return true;
      }
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitAt(current, pos);
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  return true;
}


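// Called when no register is free for the whole of the current range.
// Computes, for every register, the next use and the next blocking position
// among the conflicting ranges, then either spills the current range up to
// its first use that requires a register, or assigns the register with the
// most distant next use and evicts the conflicting ranges from it.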
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    //
    // Corner case: the first use position is equal to the start of the range.
    // In this case we have nothing to spill and SpillBetween would just return
    // this range to the list of unhandled ones, leading to an infinite loop.
    ASSERT(current->Start().Value() < register_use->pos().Value());
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}


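// Evicts the parts of active and inactive live ranges that conflict with the
// register just assigned to the current range: each conflicting range is
// split and spilled up to its next use that requires a register (or after
// the current start if there is no such use) and is then moved to the
// handled set.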
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        SpillAfter(range, split_pos);
      } else {
        SpillBetween(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
      chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
}


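// Records a move between two live ranges in the parallel move of the gap at
// the given instruction index and hints the destination use with the source
// operand.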
void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
  UsePosition* prev_pos = prev->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  UsePosition* next_pos = next->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  LOperand* prev_operand = prev_pos->operand();
  LOperand* next_operand = next_pos->operand();
  LGap* gap = chunk_->GetGapAt(pos);
  gap->GetOrCreateParallelMove(LGap::START)->
      AddMove(prev_operand, next_operand);
  next_pos->set_hint(prev_operand);
}


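// Splits the given live range at the given position and returns the new
// child range covering the part after the split. If the position is at or
// before the start of the range, no split happens and the range itself is
// returned.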
LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  LiveRange* result = LiveRangeFor(next_virtual_register_++);
  range->SplitAt(pos, result);
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitAt(range, split_pos);
}


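// Chooses a split position inside [start, end]. If both positions are in the
// same basic block the latest possible position is used; otherwise the split
// is hoisted to the header of the outermost loop that starts after the block
// containing the start position, so that code inserted at the split point
// does not end up inside a loop.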
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  if (block == end_block) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitAt(range, pos);
  Spill(second_part);
}


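// Spills the part of the given live range that lies inside [start, end[.
// The range is first split at start; if the remainder still overlaps the
// interval it is split again, only the overlapping middle part is spilled,
// and the rest is returned to the unhandled list.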
void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  ASSERT(start.Value() < end.Value());
  LiveRange* second_part = SplitAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part
    // and put the rest on the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        second_part->Start().InstructionEnd(),
        end.PrevInstruction().InstructionEnd());

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it on the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}


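// Marks the given live range as spilled. The top-level range gets a spill
// slot on first use, preferring a reusable slot freed by an earlier range
// over a fresh one from the chunk.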
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled();
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


} } // namespace v8::internal