// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "v8.h"

#include "allocation.h"
#include "lithium.h"
#include "zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LArgument;
class LChunk;
class LOperand;
class LUnallocated;
class LConstantOperand;
class LGap;
class LParallelMove;
class LPointerMap;
class LStackSlot;
class LRegister;

// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Returns the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep / 2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};
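
// Illustrative sketch (not part of the original header): how instruction
// indices map to lifetime positions, given kStep == 2 above.
//
//   LifetimePosition p = LifetimePosition::FromInstructionIndex(3);
//   p.Value();             // 6  (3 * kStep), the start of instruction 3
//   p.InstructionEnd();    // position with value 7  (start + kStep / 2)
//   p.NextInstruction();   // position with value 8, i.e. the start of instruction 4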


enum RegisterKind {
  NONE,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};

// A register-allocator view of a Lithium instruction. It contains the id of
// the output operand and a list of input operand uses.

class LInstruction;
class LEnvironment;

// Iterator for non-null temp operands.
class TempIterator BASE_EMBEDDED {
 public:
  inline explicit TempIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator for non-constant input operands.
class InputIterator BASE_EMBEDDED {
 public:
  inline explicit InputIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


class UseIterator BASE_EMBEDDED {
 public:
  inline explicit UseIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  InputIterator input_iterator_;
  DeepIterator env_iterator_;
};
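
// Illustrative sketch (not part of the original header): the three iterators
// above share one protocol, so a pass over an instruction's use operands
// would look roughly like
//
//   for (UseIterator it(instr); !it.Done(); it.Advance()) {
//     LOperand* operand = it.Current();
//     // ... inspect or rewrite the operand ...
//   }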


// Representation of the non-empty interval [start, end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos);

  // If this interval intersects with the other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};
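
// Illustrative sketch (not part of the original header): because intervals
// are half-open, [2, 6[ and [4, 10[ intersect and Intersect() returns the
// position with value 4, while [2, 4[ and [4, 10[ merely touch and
// Intersect() returns LifetimePosition::Invalid().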

// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  void set_hint(LOperand* hint) { hint_ = hint; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* operand_;
  LOperand* hint_;
  LifetimePosition pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  explicit LiveRange(int id);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand();
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, RegisterKind register_kind);
  void MakeSpilled();

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position, which must follow the start
  // of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result);

  bool IsDouble() const { return assigned_register_kind_ == DOUBLE_REGISTERS; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }
  UsePosition* FirstPosWithHint() const;

  LOperand* FirstHint() const {
    UsePosition* pos = FirstPosWithHint();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end);
  void AddUseInterval(LifetimePosition start, LifetimePosition end);
  UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands();
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  int assigned_register_;
  RegisterKind assigned_register_kind_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache; it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  LOperand* spill_operand_;
  int spill_start_index_;
};
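
// Illustrative sketch (not part of the original header): splitting a range
// that covers [10, 40[ at position 20 leaves [10, 20[ in this range and moves
// the tail, with the uses that follow the split position, into the result
// range; ranges produced by splitting hang off a single top-level range (see
// IsChild() and TopLevel() above).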


class GrowableBitVector BASE_EMBEDDED {
 public:
  GrowableBitVector() : bits_(NULL) { }

  bool Contains(int value) const {
    if (!InBitsRange(value)) return false;
    return bits_->Contains(value);
  }

  void Add(int value) {
    EnsureCapacity(value);
    bits_->Add(value);
  }

 private:
  static const int kInitialLength = 1024;

  bool InBitsRange(int value) const {
    return bits_ != NULL && bits_->length() > value;
  }

  void EnsureCapacity(int value) {
    if (InBitsRange(value)) return;
    int new_length = bits_ == NULL ? kInitialLength : bits_->length();
    while (new_length <= value) new_length *= 2;
    BitVector* new_bits = new BitVector(new_length);
    if (bits_ != NULL) new_bits->CopyFrom(*bits_);
    bits_ = new_bits;
  }

  BitVector* bits_;
};
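
// Illustrative sketch (not part of the original header): calling Add(5000) on
// an empty GrowableBitVector allocates a BitVector of length 8192
// (kInitialLength doubled until it exceeds the value), copies any existing
// bits, and then sets bit 5000.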


class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Lithium translation support.
  // Record a use of an input operand in the current instruction.
  void RecordUse(HValue* value, LUnallocated* operand);
  // Record the definition of the output operand.
  void RecordDefinition(HInstruction* instr, LUnallocated* operand);
  // Record a temporary operand.
  void RecordTemporary(LUnallocated* operand);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  // Control max function size.
  static int max_initial_value_ids();

  void Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process the instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void ProcessOsrEntry();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position start and up to position end.
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  // Return the parallel move that should be used to connect ranges split at
  // the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  LChunk* chunk_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  EmbeddedVector<LiveRange*, Register::kNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};
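
// Illustrative sketch (not part of the original header): the expected driver
// builds the Lithium chunk and then hands it to the allocator, roughly
//
//   LAllocator allocator(first_virtual_register, graph);
//   // ... while lowering: RecordDefinition() / RecordUse() /
//   //     RecordTemporary() for each instruction's operands ...
//   allocator.Allocate(chunk);
//
// Allocate() is the entry point that presumably drives the private phases
// declared above (building live ranges, allocating general and double
// registers, connecting ranges, and resolving control flow).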


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_