blob: df9d393a8f4fa9157cfcc0b083cfe77b799341f0 [file] [log] [blame]
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001//===-- RegAllocLinearScan.cpp - Linear Scan register allocator -----------===//
2//
3// The LLVM Compiler Infrastructure
4//
Chris Lattner081ce942007-12-29 20:36:04 +00005// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
Dan Gohmanf17a25c2007-07-18 16:29:46 +00007//
8//===----------------------------------------------------------------------===//
9//
10// This file implements a linear scan register allocator.
11//
12//===----------------------------------------------------------------------===//
13
14#define DEBUG_TYPE "regalloc"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000015#include "PhysRegTracker.h"
16#include "VirtRegMap.h"
17#include "llvm/Function.h"
Evan Cheng14f8a502008-06-04 09:18:41 +000018#include "llvm/CodeGen/LiveIntervalAnalysis.h"
19#include "llvm/CodeGen/LiveStackAnalysis.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000020#include "llvm/CodeGen/MachineFunctionPass.h"
21#include "llvm/CodeGen/MachineInstr.h"
Evan Cheng26d17df2007-12-11 02:09:15 +000022#include "llvm/CodeGen/MachineLoopInfo.h"
Chris Lattner1b989192007-12-31 04:13:23 +000023#include "llvm/CodeGen/MachineRegisterInfo.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000024#include "llvm/CodeGen/Passes.h"
25#include "llvm/CodeGen/RegAllocRegistry.h"
David Greene1d80f1b2007-09-06 16:18:45 +000026#include "llvm/CodeGen/RegisterCoalescer.h"
Dan Gohman1e57df32008-02-10 18:45:23 +000027#include "llvm/Target/TargetRegisterInfo.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000028#include "llvm/Target/TargetMachine.h"
Evan Chengc4c75f52007-11-03 07:20:12 +000029#include "llvm/Target/TargetInstrInfo.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000030#include "llvm/ADT/EquivalenceClasses.h"
31#include "llvm/ADT/Statistic.h"
32#include "llvm/ADT/STLExtras.h"
33#include "llvm/Support/Debug.h"
34#include "llvm/Support/Compiler.h"
35#include <algorithm>
36#include <set>
37#include <queue>
38#include <memory>
39#include <cmath>
40using namespace llvm;
41
// Statistics reported by this allocator.
STATISTIC(NumIters     , "Number of iterations performed");
STATISTIC(NumBacktracks, "Number of times we had to backtrack");
STATISTIC(NumCoalesce,   "Number of copies coalesced");

// -new-spilling-heuristic: when set, weightsAreClose() treats spill weights
// within a small tolerance as equal, so several near-best spill candidates
// may be considered instead of exactly one.
static cl::opt<bool>
NewHeuristic("new-spilling-heuristic",
             cl::desc("Use new spilling heuristic"),
             cl::init(false), cl::Hidden);

// Register this allocator with the -regalloc=linearscan factory mechanism.
static RegisterRegAlloc
linearscanRegAlloc("linearscan", " linear scan register allocator",
                   createLinearScanRegisterAllocator);
54
namespace {
  /// RALinScan - Linear scan register allocator.  Live intervals are visited
  /// in order of increasing start point; each is either given a free
  /// physical register or spilled to a stack slot.
  struct VISIBILITY_HIDDEN RALinScan : public MachineFunctionPass {
    static char ID;
    RALinScan() : MachineFunctionPass(&ID) {}

    /// IntervalPtr - A live interval paired with an iterator remembering how
    /// far into the interval's range list the scan has advanced.
    typedef std::pair<LiveInterval*, LiveInterval::iterator> IntervalPtr;
    typedef SmallVector<IntervalPtr, 32> IntervalPtrs;
  private:
    /// RelatedRegClasses - This structure is built the first time a function is
    /// compiled, and keeps track of which register classes have registers that
    /// belong to multiple classes or have aliases that are in other classes.
    EquivalenceClasses<const TargetRegisterClass*> RelatedRegClasses;

    /// OneClassForEachPhysReg - Maps each physical register to one of the
    /// register classes it belongs to (an arbitrary one if it is in several).
    DenseMap<unsigned, const TargetRegisterClass*> OneClassForEachPhysReg;

    // Cached function, target hooks and analyses; set up in
    // runOnMachineFunction before anything else runs.
    MachineFunction* mf_;
    MachineRegisterInfo* mri_;
    const TargetMachine* tm_;
    const TargetRegisterInfo* tri_;
    const TargetInstrInfo* tii_;
    BitVector allocatableRegs_;   // Registers the target lets us allocate.
    LiveIntervals* li_;
    LiveStacks* ls_;
    const MachineLoopInfo *loopInfo;

    /// handled_ - Intervals are added to the handled_ set in the order of their
    /// start value.  This is used for backtracking.
    std::vector<LiveInterval*> handled_;

    /// fixed_ - Intervals that correspond to machine registers.
    ///
    IntervalPtrs fixed_;

    /// active_ - Intervals that are currently being processed, and which have a
    /// live range active for the current point.
    IntervalPtrs active_;

    /// inactive_ - Intervals that are currently being processed, but which have
    /// a hold at the current point.
    IntervalPtrs inactive_;

    /// unhandled_ - Intervals not yet processed, ordered so that the interval
    /// with the earliest start point is popped first (see linearScan).
    typedef std::priority_queue<LiveInterval*,
                                SmallVector<LiveInterval*, 64>,
                                greater_ptr<LiveInterval> > IntervalHeap;
    IntervalHeap unhandled_;
    std::auto_ptr<PhysRegTracker> prt_;  // Tracks physical register usage.
    std::auto_ptr<VirtRegMap> vrm_;      // Virtual -> physical/stack mapping.
    std::auto_ptr<Spiller> spiller_;     // Rewrites spill code after the scan.

  public:
    virtual const char* getPassName() const {
      return "Linear Scan Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<LiveIntervals>();
      // Make sure PassManager knows which analyses to make available
      // to coalescing and which analyses coalescing invalidates.
      AU.addRequiredTransitive<RegisterCoalescer>();
      AU.addRequired<LiveStacks>();
      AU.addPreserved<LiveStacks>();
      AU.addRequired<MachineLoopInfo>();
      AU.addPreserved<MachineLoopInfo>();
      AU.addPreservedID(MachineDominatorsID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    /// runOnMachineFunction - register allocate the whole function
    bool runOnMachineFunction(MachineFunction&);

  private:
    /// linearScan - the linear scan algorithm
    void linearScan();

    /// initIntervalSets - initialize the interval sets.
    ///
    void initIntervalSets();

    /// processActiveIntervals - expire old intervals and move non-overlapping
    /// ones to the inactive list.
    void processActiveIntervals(unsigned CurPoint);

    /// processInactiveIntervals - expire old intervals and move overlapping
    /// ones to the active list.
    void processInactiveIntervals(unsigned CurPoint);

    /// assignRegOrStackSlotAtInterval - assign a register if one
    /// is available, or spill.
    void assignRegOrStackSlotAtInterval(LiveInterval* cur);

    /// findIntervalsToSpill - Determine the intervals to spill for the
    /// specified interval. It's passed the physical registers whose spill
    /// weight is the lowest among all the registers whose live intervals
    /// conflict with the interval.
    void findIntervalsToSpill(LiveInterval *cur,
                            std::vector<std::pair<unsigned,float> > &Candidates,
                              unsigned NumCands,
                              SmallVector<LiveInterval*, 8> &SpillIntervals);

    /// attemptTrivialCoalescing - If a simple interval is defined by a copy,
    /// try to allocate the definition the same register as the source
    /// register if the register is not defined during the live time of the
    /// interval.  This eliminates a copy.  This is used to coalesce copies
    /// which were not coalesced away before allocation either due to dest
    /// and src being in different register classes or because the coalescer
    /// was overly conservative.
    unsigned attemptTrivialCoalescing(LiveInterval &cur, unsigned Reg);

    ///
    /// register handling helpers
    ///

    /// getFreePhysReg - return a free physical register for this virtual
    /// register interval if we have one, otherwise return 0.
    unsigned getFreePhysReg(LiveInterval* cur);

    /// assignVirt2StackSlot - assigns this virtual register to a
    /// stack slot. returns the stack slot
    int assignVirt2StackSlot(unsigned virtReg);

    /// ComputeRelatedRegClasses - Build RelatedRegClasses and
    /// OneClassForEachPhysReg; done once, on the first compiled function.
    void ComputeRelatedRegClasses();

    // NOTE(review): declared but not defined in this part of the file —
    // presumably early-clobber helpers; confirm their definitions exist
    // further down.
    bool noEarlyClobberConflict(LiveInterval *cur, unsigned RegNo);
    unsigned findPhysReg(MachineOperand &MO);

    /// printIntervals - Debug helper: print every interval in [i, e) along
    /// with the physical register it maps to (via vrm_ for virtuals).
    template <typename ItTy>
    void printIntervals(const char* const str, ItTy i, ItTy e) const {
      if (str) DOUT << str << " intervals:\n";
      for (; i != e; ++i) {
        DOUT << "\t" << *i->first << " -> ";
        unsigned reg = i->first->reg;
        if (TargetRegisterInfo::isVirtualRegister(reg)) {
          reg = vrm_->getPhys(reg);
        }
        DOUT << tri_->getName(reg) << '\n';
      }
    }
  };
  char RALinScan::ID = 0;
}
194
// Register the allocator as a normal pass as well, under the name
// "linearscan-regalloc".
static RegisterPass<RALinScan>
X("linearscan-regalloc", "Linear Scan Register Allocator");
197
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000198void RALinScan::ComputeRelatedRegClasses() {
Dan Gohman1e57df32008-02-10 18:45:23 +0000199 const TargetRegisterInfo &TRI = *tri_;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000200
201 // First pass, add all reg classes to the union, and determine at least one
202 // reg class that each register is in.
203 bool HasAliases = false;
Dan Gohman1e57df32008-02-10 18:45:23 +0000204 for (TargetRegisterInfo::regclass_iterator RCI = TRI.regclass_begin(),
205 E = TRI.regclass_end(); RCI != E; ++RCI) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000206 RelatedRegClasses.insert(*RCI);
207 for (TargetRegisterClass::iterator I = (*RCI)->begin(), E = (*RCI)->end();
208 I != E; ++I) {
Dan Gohman1e57df32008-02-10 18:45:23 +0000209 HasAliases = HasAliases || *TRI.getAliasSet(*I) != 0;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000210
211 const TargetRegisterClass *&PRC = OneClassForEachPhysReg[*I];
212 if (PRC) {
213 // Already processed this register. Just make sure we know that
214 // multiple register classes share a register.
215 RelatedRegClasses.unionSets(PRC, *RCI);
216 } else {
217 PRC = *RCI;
218 }
219 }
220 }
221
222 // Second pass, now that we know conservatively what register classes each reg
223 // belongs to, add info about aliases. We don't need to do this for targets
224 // without register aliases.
225 if (HasAliases)
Owen Anderson4a472712008-08-13 23:36:23 +0000226 for (DenseMap<unsigned, const TargetRegisterClass*>::iterator
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000227 I = OneClassForEachPhysReg.begin(), E = OneClassForEachPhysReg.end();
228 I != E; ++I)
Dan Gohman1e57df32008-02-10 18:45:23 +0000229 for (const unsigned *AS = TRI.getAliasSet(I->first); *AS; ++AS)
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000230 RelatedRegClasses.unionSets(I->second, OneClassForEachPhysReg[*AS]);
231}
232
/// attemptTrivialCoalescing - If a simple interval is defined by a copy, try
/// to allocate the definition the same register as the source register if
/// the register is not defined during the live time of the interval.  This
/// eliminates a copy.  This is used to coalesce copies which were not
/// coalesced away before allocation either due to dest and src being in
/// different register classes or because the coalescer was overly
/// conservative.
///
/// Returns the physical register the interval ends up with: the copy's
/// source register on success, otherwise the original Reg.
unsigned RALinScan::attemptTrivialCoalescing(LiveInterval &cur, unsigned Reg) {
  // Nothing to do if the interval already prefers this register, or if it
  // has more than one value number (i.e. more than one definition).
  if ((cur.preference && cur.preference == Reg) || !cur.containsOneValue())
    return Reg;

  // Bail out when the sole definition's index is one of the reserved
  // sentinel values rather than a real instruction index.
  VNInfo *vni = cur.getValNumInfo(0);
  if (!vni->def || vni->def == ~1U || vni->def == ~0U)
    return Reg;
  // The defining instruction must be a move.
  MachineInstr *CopyMI = li_->getInstructionFromIndex(vni->def);
  unsigned SrcReg, DstReg;
  if (!CopyMI || !tii_->isMoveInstr(*CopyMI, SrcReg, DstReg))
    return Reg;
  // A virtual source must itself already have a physical register assigned;
  // translate it to that register.
  if (TargetRegisterInfo::isVirtualRegister(SrcReg)) {
    if (!vrm_->isAssignedReg(SrcReg))
      return Reg;
    else
      SrcReg = vrm_->getPhys(SrcReg);
  }
  if (Reg == SrcReg)
    return Reg;

  // The source register must be allocatable in cur's register class.
  const TargetRegisterClass *RC = mri_->getRegClass(cur.reg);
  if (!RC->contains(SrcReg))
    return Reg;

  // Try to coalesce.
  if (!li_->conflictsWithPhysRegDef(cur, *vrm_, SrcReg)) {
    DOUT << "Coalescing: " << cur << " -> " << tri_->getName(SrcReg)
         << '\n';
    // Reassign the interval from Reg to SrcReg, making the copy redundant.
    vrm_->clearVirt(cur.reg);
    vrm_->assignVirt2Phys(cur.reg, SrcReg);
    ++NumCoalesce;
    return SrcReg;
  }

  return Reg;
}
276
/// runOnMachineFunction - Register allocate the whole function: cache the
/// analyses and target hooks, run the linear scan over all live intervals,
/// then let the Spiller rewrite spill code.  Always returns true (the
/// function is modified).
bool RALinScan::runOnMachineFunction(MachineFunction &fn) {
  mf_ = &fn;
  mri_ = &fn.getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  allocatableRegs_ = tri_->getAllocatableSet(fn);
  li_ = &getAnalysis<LiveIntervals>();
  ls_ = &getAnalysis<LiveStacks>();
  loopInfo = &getAnalysis<MachineLoopInfo>();

  // We don't run the coalescer here because we have no reason to
  // interact with it.  If the coalescer requires interaction, it
  // won't do anything.  If it doesn't require interaction, we assume
  // it was run as a separate pass.

  // If this is the first function compiled, compute the related reg classes.
  if (RelatedRegClasses.empty())
    ComputeRelatedRegClasses();

  // The register tracker and spiller persist across functions; the virtual
  // register map is rebuilt fresh for each function.
  if (!prt_.get()) prt_.reset(new PhysRegTracker(*tri_));
  vrm_.reset(new VirtRegMap(*mf_));
  if (!spiller_.get()) spiller_.reset(createSpiller());

  initIntervalSets();

  linearScan();

  // Rewrite spill code and update the PhysRegsUsed set.
  spiller_->runOnMachineFunction(*mf_, *vrm_);
  vrm_.reset();  // Free the VirtRegMap

  // The scan must have consumed every unhandled interval; drop the
  // per-function working sets so the next function starts clean.
  assert(unhandled_.empty() && "Unhandled live intervals remain!");
  fixed_.clear();
  active_.clear();
  inactive_.clear();
  handled_.clear();

  return true;
}
317
318/// initIntervalSets - initialize the interval sets.
319///
320void RALinScan::initIntervalSets()
321{
322 assert(unhandled_.empty() && fixed_.empty() &&
323 active_.empty() && inactive_.empty() &&
324 "interval sets should be empty on initialization");
325
Owen Andersonba926a32008-08-15 18:49:41 +0000326 handled_.reserve(li_->getNumIntervals());
327
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000328 for (LiveIntervals::iterator i = li_->begin(), e = li_->end(); i != e; ++i) {
Owen Anderson348d1d82008-08-13 21:49:13 +0000329 if (TargetRegisterInfo::isPhysicalRegister(i->second->reg)) {
Evan Cheng06b74c52008-09-18 22:38:47 +0000330 mri_->setPhysRegUsed(i->second->reg);
Owen Anderson348d1d82008-08-13 21:49:13 +0000331 fixed_.push_back(std::make_pair(i->second, i->second->begin()));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000332 } else
Owen Anderson348d1d82008-08-13 21:49:13 +0000333 unhandled_.push(i->second);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000334 }
335}
336
/// linearScan - Main driver of the allocation: pop intervals off the
/// unhandled heap in start order, retire/park intervals that the scan has
/// passed, and assign each current interval a register or a stack slot.
/// Afterwards, record block live-ins and attempt trivial coalescing.
void RALinScan::linearScan()
{
  // linear scan algorithm
  DOUT << "********** LINEAR SCAN **********\n";
  DOUT << "********** Function: " << mf_->getFunction()->getName() << '\n';

  DEBUG(printIntervals("fixed", fixed_.begin(), fixed_.end()));

  while (!unhandled_.empty()) {
    // pick the interval with the earliest start point
    LiveInterval* cur = unhandled_.top();
    unhandled_.pop();
    ++NumIters;
    DOUT << "\n*** CURRENT ***: " << *cur << '\n';

    // An empty interval has no start point, so the active/inactive lists
    // need not be advanced for it.
    if (!cur->empty()) {
      processActiveIntervals(cur->beginNumber());
      processInactiveIntervals(cur->beginNumber());

      assert(TargetRegisterInfo::isVirtualRegister(cur->reg) &&
             "Can only allocate virtual registers!");
    }

    // Allocating a virtual register. try to find a free
    // physical register or spill an interval (possibly this one) in order to
    // assign it one.
    assignRegOrStackSlotAtInterval(cur);

    DEBUG(printIntervals("active", active_.begin(), active_.end()));
    DEBUG(printIntervals("inactive", inactive_.begin(), inactive_.end()));
  }

  // expire any remaining active intervals
  while (!active_.empty()) {
    IntervalPtr &IP = active_.back();
    unsigned reg = IP.first->reg;
    DOUT << "\tinterval " << *IP.first << " expired\n";
    assert(TargetRegisterInfo::isVirtualRegister(reg) &&
           "Can only allocate virtual registers!");
    // Release the physical register the expiring interval was occupying.
    reg = vrm_->getPhys(reg);
    prt_->delRegUse(reg);
    active_.pop_back();
  }

  // expire any remaining inactive intervals
  DEBUG(for (IntervalPtrs::reverse_iterator
               i = inactive_.rbegin(); i != inactive_.rend(); ++i)
          DOUT << "\tinterval " << *i->first << " expired\n");
  inactive_.clear();

  // Add live-ins to every BB except for entry. Also perform trivial coalescing.
  MachineFunction::iterator EntryMBB = mf_->begin();
  SmallVector<MachineBasicBlock*, 8> LiveInMBBs;
  for (LiveIntervals::iterator i = li_->begin(), e = li_->end(); i != e; ++i) {
    LiveInterval &cur = *i->second;
    unsigned Reg = 0;
    bool isPhys = TargetRegisterInfo::isPhysicalRegister(cur.reg);
    if (isPhys)
      Reg = cur.reg;
    else if (vrm_->isAssignedReg(cur.reg))
      // Virtual interval with an assignment: see if the assignment can be
      // folded into the register of the copy that defines it.
      Reg = attemptTrivialCoalescing(cur, vrm_->getPhys(cur.reg));
    if (!Reg)
      continue;
    // Ignore split live intervals.
    if (!isPhys && vrm_->getPreSplitReg(cur.reg))
      continue;
    // For each block (other than entry) that a range of this interval is
    // live into, record Reg as a live-in of that block.
    for (LiveInterval::Ranges::const_iterator I = cur.begin(), E = cur.end();
         I != E; ++I) {
      const LiveRange &LR = *I;
      if (li_->findLiveInMBBs(LR, LiveInMBBs)) {
        for (unsigned i = 0, e = LiveInMBBs.size(); i != e; ++i)
          if (LiveInMBBs[i] != EntryMBB)
            LiveInMBBs[i]->addLiveIn(Reg);
        LiveInMBBs.clear();  // Reuse the scratch vector for the next range.
      }
    }
  }

  DOUT << *vrm_;
}
417
/// processActiveIntervals - expire old intervals and move non-overlapping ones
/// to the inactive list.
void RALinScan::processActiveIntervals(unsigned CurPoint)
{
  DOUT << "\tprocessing active intervals:\n";

  for (unsigned i = 0, e = active_.size(); i != e; ++i) {
    LiveInterval *Interval = active_[i].first;
    LiveInterval::iterator IntervalPos = active_[i].second;
    unsigned reg = Interval->reg;

    // Advance the cached range position up to CurPoint.
    IntervalPos = Interval->advanceTo(IntervalPos, CurPoint);

    if (IntervalPos == Interval->end()) {     // Remove expired intervals.
      DOUT << "\t\tinterval " << *Interval << " expired\n";
      assert(TargetRegisterInfo::isVirtualRegister(reg) &&
             "Can only allocate virtual registers!");
      // The interval is entirely behind CurPoint: free its register.
      reg = vrm_->getPhys(reg);
      prt_->delRegUse(reg);

      // Pop off the end of the list.
      // (Swap-with-last erase: slot i gets the last element, which is then
      // re-examined on the next iteration thanks to --i.)
      active_[i] = active_.back();
      active_.pop_back();
      --i; --e;

    } else if (IntervalPos->start > CurPoint) {
      // Move inactive intervals to inactive list.
      // The next range starts after CurPoint, so the interval has a hole
      // here: release its register until it becomes live again.
      DOUT << "\t\tinterval " << *Interval << " inactive\n";
      assert(TargetRegisterInfo::isVirtualRegister(reg) &&
             "Can only allocate virtual registers!");
      reg = vrm_->getPhys(reg);
      prt_->delRegUse(reg);
      // add to inactive.
      inactive_.push_back(std::make_pair(Interval, IntervalPos));

      // Pop off the end of the list.
      active_[i] = active_.back();
      active_.pop_back();
      --i; --e;
    } else {
      // Otherwise, just update the iterator position.
      active_[i].second = IntervalPos;
    }
  }
}
463
/// processInactiveIntervals - expire old intervals and move overlapping
/// ones to the active list.
void RALinScan::processInactiveIntervals(unsigned CurPoint)
{
  DOUT << "\tprocessing inactive intervals:\n";

  for (unsigned i = 0, e = inactive_.size(); i != e; ++i) {
    LiveInterval *Interval = inactive_[i].first;
    LiveInterval::iterator IntervalPos = inactive_[i].second;
    unsigned reg = Interval->reg;

    // Advance the cached range position up to CurPoint.
    IntervalPos = Interval->advanceTo(IntervalPos, CurPoint);

    if (IntervalPos == Interval->end()) {       // remove expired intervals.
      DOUT << "\t\tinterval " << *Interval << " expired\n";

      // Pop off the end of the list.
      // (Swap-with-last erase; no register to free — inactive intervals do
      // not occupy their register during the hole.)
      inactive_[i] = inactive_.back();
      inactive_.pop_back();
      --i; --e;
    } else if (IntervalPos->start <= CurPoint) {
      // move re-activated intervals in active list
      DOUT << "\t\tinterval " << *Interval << " active\n";
      assert(TargetRegisterInfo::isVirtualRegister(reg) &&
             "Can only allocate virtual registers!");
      // The interval is live again at CurPoint: re-occupy its register.
      reg = vrm_->getPhys(reg);
      prt_->addRegUse(reg);
      // add to active
      active_.push_back(std::make_pair(Interval, IntervalPos));

      // Pop off the end of the list.
      inactive_[i] = inactive_.back();
      inactive_.pop_back();
      --i; --e;
    } else {
      // Otherwise, just update the iterator position.
      inactive_[i].second = IntervalPos;
    }
  }
}
504
505/// updateSpillWeights - updates the spill weights of the specifed physical
506/// register and its weight.
507static void updateSpillWeights(std::vector<float> &Weights,
508 unsigned reg, float weight,
Dan Gohman1e57df32008-02-10 18:45:23 +0000509 const TargetRegisterInfo *TRI) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000510 Weights[reg] += weight;
Dan Gohman1e57df32008-02-10 18:45:23 +0000511 for (const unsigned* as = TRI->getAliasSet(reg); *as; ++as)
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000512 Weights[*as] += weight;
513}
514
515static
516RALinScan::IntervalPtrs::iterator
517FindIntervalInVector(RALinScan::IntervalPtrs &IP, LiveInterval *LI) {
518 for (RALinScan::IntervalPtrs::iterator I = IP.begin(), E = IP.end();
519 I != E; ++I)
520 if (I->first == LI) return I;
521 return IP.end();
522}
523
524static void RevertVectorIteratorsTo(RALinScan::IntervalPtrs &V, unsigned Point){
525 for (unsigned i = 0, e = V.size(); i != e; ++i) {
526 RALinScan::IntervalPtr &IP = V[i];
527 LiveInterval::iterator I = std::upper_bound(IP.first->begin(),
528 IP.second, Point);
529 if (I != IP.first->begin()) --I;
530 IP.second = I;
531 }
532}
533
/// addStackInterval - Create a LiveInterval for stack if the specified live
/// interval has been spilled.  \p Weight is added to the stack slot
/// interval's spill weight.
static void addStackInterval(LiveInterval *cur, LiveStacks *ls_,
                             LiveIntervals *li_, float &Weight,
                             VirtRegMap &vrm_) {
  // Only spilled registers have a stack slot assigned.
  int SS = vrm_.getStackSlot(cur->reg);
  if (SS == VirtRegMap::NO_STACK_SLOT)
    return;
  LiveInterval &SI = ls_->getOrCreateInterval(SS);
  SI.weight += Weight;

  // All ranges of the stack slot interval share one value number; reuse the
  // existing one if the slot interval already has values.
  VNInfo *VNI;
  if (SI.getNumValNums())
    VNI = SI.getValNumInfo(0);
  else
    VNI = SI.getNextValue(~0U, 0, ls_->getVNInfoAllocator());

  // Mirror the register's live ranges onto the stack slot interval.
  LiveInterval &RI = li_->getInterval(cur->reg);
  // FIXME: This may be overly conservative.
  SI.MergeRangesInAsValue(RI, VNI);
}
555
Evan Chengc5952452008-06-20 21:45:16 +0000556/// getConflictWeight - Return the number of conflicts between cur
557/// live interval and defs and uses of Reg weighted by loop depthes.
558static float getConflictWeight(LiveInterval *cur, unsigned Reg,
559 LiveIntervals *li_,
560 MachineRegisterInfo *mri_,
561 const MachineLoopInfo *loopInfo) {
562 float Conflicts = 0;
563 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(Reg),
564 E = mri_->reg_end(); I != E; ++I) {
565 MachineInstr *MI = &*I;
566 if (cur->liveAt(li_->getInstructionIndex(MI))) {
567 unsigned loopDepth = loopInfo->getLoopDepth(MI->getParent());
568 Conflicts += powf(10.0f, (float)loopDepth);
569 }
570 }
571 return Conflicts;
572}
573
574/// findIntervalsToSpill - Determine the intervals to spill for the
575/// specified interval. It's passed the physical registers whose spill
576/// weight is the lowest among all the registers whose live intervals
577/// conflict with the interval.
578void RALinScan::findIntervalsToSpill(LiveInterval *cur,
579 std::vector<std::pair<unsigned,float> > &Candidates,
580 unsigned NumCands,
581 SmallVector<LiveInterval*, 8> &SpillIntervals) {
582 // We have figured out the *best* register to spill. But there are other
583 // registers that are pretty good as well (spill weight within 3%). Spill
584 // the one that has fewest defs and uses that conflict with cur.
585 float Conflicts[3] = { 0.0f, 0.0f, 0.0f };
586 SmallVector<LiveInterval*, 8> SLIs[3];
587
588 DOUT << "\tConsidering " << NumCands << " candidates: ";
589 DEBUG(for (unsigned i = 0; i != NumCands; ++i)
590 DOUT << tri_->getName(Candidates[i].first) << " ";
591 DOUT << "\n";);
592
593 // Calculate the number of conflicts of each candidate.
594 for (IntervalPtrs::iterator i = active_.begin(); i != active_.end(); ++i) {
595 unsigned Reg = i->first->reg;
596 unsigned PhysReg = vrm_->getPhys(Reg);
597 if (!cur->overlapsFrom(*i->first, i->second))
598 continue;
599 for (unsigned j = 0; j < NumCands; ++j) {
600 unsigned Candidate = Candidates[j].first;
601 if (tri_->regsOverlap(PhysReg, Candidate)) {
602 if (NumCands > 1)
603 Conflicts[j] += getConflictWeight(cur, Reg, li_, mri_, loopInfo);
604 SLIs[j].push_back(i->first);
605 }
606 }
607 }
608
609 for (IntervalPtrs::iterator i = inactive_.begin(); i != inactive_.end(); ++i){
610 unsigned Reg = i->first->reg;
611 unsigned PhysReg = vrm_->getPhys(Reg);
612 if (!cur->overlapsFrom(*i->first, i->second-1))
613 continue;
614 for (unsigned j = 0; j < NumCands; ++j) {
615 unsigned Candidate = Candidates[j].first;
616 if (tri_->regsOverlap(PhysReg, Candidate)) {
617 if (NumCands > 1)
618 Conflicts[j] += getConflictWeight(cur, Reg, li_, mri_, loopInfo);
619 SLIs[j].push_back(i->first);
620 }
621 }
622 }
623
624 // Which is the best candidate?
625 unsigned BestCandidate = 0;
626 float MinConflicts = Conflicts[0];
627 for (unsigned i = 1; i != NumCands; ++i) {
628 if (Conflicts[i] < MinConflicts) {
629 BestCandidate = i;
630 MinConflicts = Conflicts[i];
631 }
632 }
633
634 std::copy(SLIs[BestCandidate].begin(), SLIs[BestCandidate].end(),
635 std::back_inserter(SpillIntervals));
636}
637
namespace {
  /// WeightCompare - Orders (register, spill weight) pairs by ascending
  /// spill weight; the register number itself plays no part in the order.
  struct WeightCompare {
    typedef std::pair<unsigned, float> RegWeightPair;
    bool operator()(const RegWeightPair &A, const RegWeightPair &B) const {
      return A.second < B.second;
    }
  };
}
646
647static bool weightsAreClose(float w1, float w2) {
648 if (!NewHeuristic)
649 return false;
650
651 float diff = w1 - w2;
652 if (diff <= 0.02f) // Within 0.02f
653 return true;
654 return (diff / w2) <= 0.05f; // Within 5%.
655}
656
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000657/// assignRegOrStackSlotAtInterval - assign a register if one is available, or
658/// spill.
659void RALinScan::assignRegOrStackSlotAtInterval(LiveInterval* cur)
660{
661 DOUT << "\tallocating current interval: ";
662
Evan Chenga3186992008-04-03 16:40:27 +0000663 // This is an implicitly defined live interval, just assign any register.
Evan Cheng06b74c52008-09-18 22:38:47 +0000664 const TargetRegisterClass *RC = mri_->getRegClass(cur->reg);
Evan Chenga3186992008-04-03 16:40:27 +0000665 if (cur->empty()) {
666 unsigned physReg = cur->preference;
667 if (!physReg)
668 physReg = *RC->allocation_order_begin(*mf_);
669 DOUT << tri_->getName(physReg) << '\n';
670 // Note the register is not really in use.
671 vrm_->assignVirt2Phys(cur->reg, physReg);
Evan Chenga3186992008-04-03 16:40:27 +0000672 return;
673 }
674
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000675 PhysRegTracker backupPrt = *prt_;
676
677 std::vector<std::pair<unsigned, float> > SpillWeightsToAdd;
678 unsigned StartPosition = cur->beginNumber();
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000679 const TargetRegisterClass *RCLeader = RelatedRegClasses.getLeaderValue(RC);
Evan Chengc4c75f52007-11-03 07:20:12 +0000680
681 // If this live interval is defined by a move instruction and its source is
682 // assigned a physical register that is compatible with the target register
683 // class, then we should try to assign it the same register.
684 // This can happen when the move is from a larger register class to a smaller
685 // one, e.g. X86::mov32to32_. These move instructions are not coalescable.
686 if (!cur->preference && cur->containsOneValue()) {
687 VNInfo *vni = cur->getValNumInfo(0);
688 if (vni->def && vni->def != ~1U && vni->def != ~0U) {
689 MachineInstr *CopyMI = li_->getInstructionFromIndex(vni->def);
690 unsigned SrcReg, DstReg;
Evan Cheng1fbf9c22008-04-11 17:55:47 +0000691 if (CopyMI && tii_->isMoveInstr(*CopyMI, SrcReg, DstReg)) {
Evan Chengc4c75f52007-11-03 07:20:12 +0000692 unsigned Reg = 0;
Dan Gohman1e57df32008-02-10 18:45:23 +0000693 if (TargetRegisterInfo::isPhysicalRegister(SrcReg))
Evan Chengc4c75f52007-11-03 07:20:12 +0000694 Reg = SrcReg;
695 else if (vrm_->isAssignedReg(SrcReg))
696 Reg = vrm_->getPhys(SrcReg);
697 if (Reg && allocatableRegs_[Reg] && RC->contains(Reg))
698 cur->preference = Reg;
699 }
700 }
701 }
702
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000703 // for every interval in inactive we overlap with, mark the
704 // register as not free and update spill weights.
705 for (IntervalPtrs::const_iterator i = inactive_.begin(),
706 e = inactive_.end(); i != e; ++i) {
707 unsigned Reg = i->first->reg;
Dan Gohman1e57df32008-02-10 18:45:23 +0000708 assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000709 "Can only allocate virtual registers!");
Evan Cheng06b74c52008-09-18 22:38:47 +0000710 const TargetRegisterClass *RegRC = mri_->getRegClass(Reg);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000711 // If this is not in a related reg class to the register we're allocating,
712 // don't check it.
713 if (RelatedRegClasses.getLeaderValue(RegRC) == RCLeader &&
714 cur->overlapsFrom(*i->first, i->second-1)) {
715 Reg = vrm_->getPhys(Reg);
716 prt_->addRegUse(Reg);
717 SpillWeightsToAdd.push_back(std::make_pair(Reg, i->first->weight));
718 }
719 }
720
721 // Speculatively check to see if we can get a register right now. If not,
722 // we know we won't be able to by adding more constraints. If so, we can
723 // check to see if it is valid. Doing an exhaustive search of the fixed_ list
724 // is very bad (it contains all callee clobbered registers for any functions
725 // with a call), so we want to avoid doing that if possible.
726 unsigned physReg = getFreePhysReg(cur);
Evan Cheng14cc83f2008-03-11 07:19:34 +0000727 unsigned BestPhysReg = physReg;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000728 if (physReg) {
729 // We got a register. However, if it's in the fixed_ list, we might
730 // conflict with it. Check to see if we conflict with it or any of its
731 // aliases.
Evan Chengc4c75f52007-11-03 07:20:12 +0000732 SmallSet<unsigned, 8> RegAliases;
Dan Gohman1e57df32008-02-10 18:45:23 +0000733 for (const unsigned *AS = tri_->getAliasSet(physReg); *AS; ++AS)
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000734 RegAliases.insert(*AS);
735
736 bool ConflictsWithFixed = false;
737 for (unsigned i = 0, e = fixed_.size(); i != e; ++i) {
738 IntervalPtr &IP = fixed_[i];
739 if (physReg == IP.first->reg || RegAliases.count(IP.first->reg)) {
740 // Okay, this reg is on the fixed list. Check to see if we actually
741 // conflict.
742 LiveInterval *I = IP.first;
743 if (I->endNumber() > StartPosition) {
744 LiveInterval::iterator II = I->advanceTo(IP.second, StartPosition);
745 IP.second = II;
746 if (II != I->begin() && II->start > StartPosition)
747 --II;
748 if (cur->overlapsFrom(*I, II)) {
749 ConflictsWithFixed = true;
750 break;
751 }
752 }
753 }
754 }
755
756 // Okay, the register picked by our speculative getFreePhysReg call turned
757 // out to be in use. Actually add all of the conflicting fixed registers to
758 // prt so we can do an accurate query.
759 if (ConflictsWithFixed) {
760 // For every interval in fixed we overlap with, mark the register as not
761 // free and update spill weights.
762 for (unsigned i = 0, e = fixed_.size(); i != e; ++i) {
763 IntervalPtr &IP = fixed_[i];
764 LiveInterval *I = IP.first;
765
766 const TargetRegisterClass *RegRC = OneClassForEachPhysReg[I->reg];
767 if (RelatedRegClasses.getLeaderValue(RegRC) == RCLeader &&
768 I->endNumber() > StartPosition) {
769 LiveInterval::iterator II = I->advanceTo(IP.second, StartPosition);
770 IP.second = II;
771 if (II != I->begin() && II->start > StartPosition)
772 --II;
773 if (cur->overlapsFrom(*I, II)) {
774 unsigned reg = I->reg;
775 prt_->addRegUse(reg);
776 SpillWeightsToAdd.push_back(std::make_pair(reg, I->weight));
777 }
778 }
779 }
780
781 // Using the newly updated prt_ object, which includes conflicts in the
782 // future, see if there are any registers available.
783 physReg = getFreePhysReg(cur);
784 }
785 }
786
787 // Restore the physical register tracker, removing information about the
788 // future.
789 *prt_ = backupPrt;
790
791 // if we find a free register, we are done: assign this virtual to
792 // the free physical register and add this interval to the active
793 // list.
794 if (physReg) {
Bill Wendling9b0baeb2008-02-26 21:47:57 +0000795 DOUT << tri_->getName(physReg) << '\n';
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000796 vrm_->assignVirt2Phys(cur->reg, physReg);
797 prt_->addRegUse(physReg);
798 active_.push_back(std::make_pair(cur, cur->begin()));
799 handled_.push_back(cur);
800 return;
801 }
802 DOUT << "no free registers\n";
803
804 // Compile the spill weights into an array that is better for scanning.
Evan Chengc5952452008-06-20 21:45:16 +0000805 std::vector<float> SpillWeights(tri_->getNumRegs(), 0.0f);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000806 for (std::vector<std::pair<unsigned, float> >::iterator
807 I = SpillWeightsToAdd.begin(), E = SpillWeightsToAdd.end(); I != E; ++I)
Dan Gohman1e57df32008-02-10 18:45:23 +0000808 updateSpillWeights(SpillWeights, I->first, I->second, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000809
810 // for each interval in active, update spill weights.
811 for (IntervalPtrs::const_iterator i = active_.begin(), e = active_.end();
812 i != e; ++i) {
813 unsigned reg = i->first->reg;
Dan Gohman1e57df32008-02-10 18:45:23 +0000814 assert(TargetRegisterInfo::isVirtualRegister(reg) &&
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000815 "Can only allocate virtual registers!");
816 reg = vrm_->getPhys(reg);
Dan Gohman1e57df32008-02-10 18:45:23 +0000817 updateSpillWeights(SpillWeights, reg, i->first->weight, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000818 }
819
820 DOUT << "\tassigning stack slot at interval "<< *cur << ":\n";
821
822 // Find a register to spill.
823 float minWeight = HUGE_VALF;
Evan Chengc5952452008-06-20 21:45:16 +0000824 unsigned minReg = 0; /*cur->preference*/; // Try the preferred register first.
825
826 bool Found = false;
827 std::vector<std::pair<unsigned,float> > RegsWeights;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000828 if (!minReg || SpillWeights[minReg] == HUGE_VALF)
829 for (TargetRegisterClass::iterator i = RC->allocation_order_begin(*mf_),
830 e = RC->allocation_order_end(*mf_); i != e; ++i) {
831 unsigned reg = *i;
Evan Chengc5952452008-06-20 21:45:16 +0000832 float regWeight = SpillWeights[reg];
833 if (minWeight > regWeight)
834 Found = true;
835 RegsWeights.push_back(std::make_pair(reg, regWeight));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000836 }
837
838 // If we didn't find a register that is spillable, try aliases?
Evan Chengc5952452008-06-20 21:45:16 +0000839 if (!Found) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000840 for (TargetRegisterClass::iterator i = RC->allocation_order_begin(*mf_),
841 e = RC->allocation_order_end(*mf_); i != e; ++i) {
842 unsigned reg = *i;
843 // No need to worry about if the alias register size < regsize of RC.
844 // We are going to spill all registers that alias it anyway.
Evan Chengc5952452008-06-20 21:45:16 +0000845 for (const unsigned* as = tri_->getAliasSet(reg); *as; ++as)
846 RegsWeights.push_back(std::make_pair(*as, SpillWeights[*as]));
Evan Cheng14cc83f2008-03-11 07:19:34 +0000847 }
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000848 }
Evan Chengc5952452008-06-20 21:45:16 +0000849
850 // Sort all potential spill candidates by weight.
851 std::sort(RegsWeights.begin(), RegsWeights.end(), WeightCompare());
852 minReg = RegsWeights[0].first;
853 minWeight = RegsWeights[0].second;
854 if (minWeight == HUGE_VALF) {
855 // All registers must have inf weight. Just grab one!
856 minReg = BestPhysReg ? BestPhysReg : *RC->allocation_order_begin(*mf_);
Owen Andersona0e65132008-07-22 22:46:49 +0000857 if (cur->weight == HUGE_VALF ||
Evan Chengaf3c4e32008-09-20 01:28:05 +0000858 li_->getApproximateInstructionCount(*cur) == 0) {
Evan Chengc5952452008-06-20 21:45:16 +0000859 // Spill a physical register around defs and uses.
860 li_->spillPhysRegAroundRegDefsUses(*cur, minReg, *vrm_);
Evan Chengaf3c4e32008-09-20 01:28:05 +0000861 assignRegOrStackSlotAtInterval(cur);
862 return;
863 }
Evan Chengc5952452008-06-20 21:45:16 +0000864 }
865
866 // Find up to 3 registers to consider as spill candidates.
867 unsigned LastCandidate = RegsWeights.size() >= 3 ? 3 : 1;
868 while (LastCandidate > 1) {
869 if (weightsAreClose(RegsWeights[LastCandidate-1].second, minWeight))
870 break;
871 --LastCandidate;
872 }
873
874 DOUT << "\t\tregister(s) with min weight(s): ";
875 DEBUG(for (unsigned i = 0; i != LastCandidate; ++i)
876 DOUT << tri_->getName(RegsWeights[i].first)
877 << " (" << RegsWeights[i].second << ")\n");
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000878
879 // if the current has the minimum weight, we need to spill it and
880 // add any added intervals back to unhandled, and restart
881 // linearscan.
882 if (cur->weight != HUGE_VALF && cur->weight <= minWeight) {
883 DOUT << "\t\t\tspilling(c): " << *cur << '\n';
Evan Chengba221ca2008-06-06 07:54:39 +0000884 float SSWeight;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000885 std::vector<LiveInterval*> added =
Evan Chengba221ca2008-06-06 07:54:39 +0000886 li_->addIntervalsForSpills(*cur, loopInfo, *vrm_, SSWeight);
887 addStackInterval(cur, ls_, li_, SSWeight, *vrm_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000888 if (added.empty())
889 return; // Early exit if all spills were folded.
890
891 // Merge added with unhandled. Note that we know that
892 // addIntervalsForSpills returns intervals sorted by their starting
893 // point.
894 for (unsigned i = 0, e = added.size(); i != e; ++i)
895 unhandled_.push(added[i]);
896 return;
897 }
898
899 ++NumBacktracks;
900
901 // push the current interval back to unhandled since we are going
902 // to re-run at least this iteration. Since we didn't modify it it
903 // should go back right in the front of the list
904 unhandled_.push(cur);
905
Dan Gohman1e57df32008-02-10 18:45:23 +0000906 assert(TargetRegisterInfo::isPhysicalRegister(minReg) &&
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000907 "did not choose a register to spill?");
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000908
Evan Chengc5952452008-06-20 21:45:16 +0000909 // We spill all intervals aliasing the register with
910 // minimum weight, rollback to the interval with the earliest
911 // start point and let the linear scan algorithm run again
912 SmallVector<LiveInterval*, 8> spillIs;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000913
Evan Chengc5952452008-06-20 21:45:16 +0000914 // Determine which intervals have to be spilled.
915 findIntervalsToSpill(cur, RegsWeights, LastCandidate, spillIs);
916
917 // Set of spilled vregs (used later to rollback properly)
918 SmallSet<unsigned, 8> spilled;
919
920 // The earliest start of a Spilled interval indicates up to where
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000921 // in handled we need to roll back
922 unsigned earliestStart = cur->beginNumber();
923
Evan Chengc5952452008-06-20 21:45:16 +0000924 // Spill live intervals of virtual regs mapped to the physical register we
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000925 // want to clear (and its aliases). We only spill those that overlap with the
926 // current interval as the rest do not affect its allocation. we also keep
927 // track of the earliest start of all spilled live intervals since this will
928 // mark our rollback point.
Evan Chengc5952452008-06-20 21:45:16 +0000929 std::vector<LiveInterval*> added;
930 while (!spillIs.empty()) {
931 LiveInterval *sli = spillIs.back();
932 spillIs.pop_back();
933 DOUT << "\t\t\tspilling(a): " << *sli << '\n';
934 earliestStart = std::min(earliestStart, sli->beginNumber());
935 float SSWeight;
936 std::vector<LiveInterval*> newIs =
937 li_->addIntervalsForSpills(*sli, loopInfo, *vrm_, SSWeight);
938 addStackInterval(sli, ls_, li_, SSWeight, *vrm_);
939 std::copy(newIs.begin(), newIs.end(), std::back_inserter(added));
940 spilled.insert(sli->reg);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000941 }
942
943 DOUT << "\t\trolling back to: " << earliestStart << '\n';
944
945 // Scan handled in reverse order up to the earliest start of a
946 // spilled live interval and undo each one, restoring the state of
947 // unhandled.
948 while (!handled_.empty()) {
949 LiveInterval* i = handled_.back();
950 // If this interval starts before t we are done.
951 if (i->beginNumber() < earliestStart)
952 break;
953 DOUT << "\t\t\tundo changes for: " << *i << '\n';
954 handled_.pop_back();
955
956 // When undoing a live interval allocation we must know if it is active or
957 // inactive to properly update the PhysRegTracker and the VirtRegMap.
958 IntervalPtrs::iterator it;
959 if ((it = FindIntervalInVector(active_, i)) != active_.end()) {
960 active_.erase(it);
Dan Gohman1e57df32008-02-10 18:45:23 +0000961 assert(!TargetRegisterInfo::isPhysicalRegister(i->reg));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000962 if (!spilled.count(i->reg))
963 unhandled_.push(i);
964 prt_->delRegUse(vrm_->getPhys(i->reg));
965 vrm_->clearVirt(i->reg);
966 } else if ((it = FindIntervalInVector(inactive_, i)) != inactive_.end()) {
967 inactive_.erase(it);
Dan Gohman1e57df32008-02-10 18:45:23 +0000968 assert(!TargetRegisterInfo::isPhysicalRegister(i->reg));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000969 if (!spilled.count(i->reg))
970 unhandled_.push(i);
971 vrm_->clearVirt(i->reg);
972 } else {
Dan Gohman1e57df32008-02-10 18:45:23 +0000973 assert(TargetRegisterInfo::isVirtualRegister(i->reg) &&
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000974 "Can only allocate virtual registers!");
975 vrm_->clearVirt(i->reg);
976 unhandled_.push(i);
977 }
Evan Chengb6aa6712007-11-04 08:32:21 +0000978
979 // It interval has a preference, it must be defined by a copy. Clear the
980 // preference now since the source interval allocation may have been undone
981 // as well.
982 i->preference = 0;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000983 }
984
985 // Rewind the iterators in the active, inactive, and fixed lists back to the
986 // point we reverted to.
987 RevertVectorIteratorsTo(active_, earliestStart);
988 RevertVectorIteratorsTo(inactive_, earliestStart);
989 RevertVectorIteratorsTo(fixed_, earliestStart);
990
991 // scan the rest and undo each interval that expired after t and
992 // insert it in active (the next iteration of the algorithm will
993 // put it in inactive if required)
994 for (unsigned i = 0, e = handled_.size(); i != e; ++i) {
995 LiveInterval *HI = handled_[i];
996 if (!HI->expiredAt(earliestStart) &&
997 HI->expiredAt(cur->beginNumber())) {
998 DOUT << "\t\t\tundo changes for: " << *HI << '\n';
999 active_.push_back(std::make_pair(HI, HI->begin()));
Dan Gohman1e57df32008-02-10 18:45:23 +00001000 assert(!TargetRegisterInfo::isPhysicalRegister(HI->reg));
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001001 prt_->addRegUse(vrm_->getPhys(HI->reg));
1002 }
1003 }
1004
1005 // merge added with unhandled
1006 for (unsigned i = 0, e = added.size(); i != e; ++i)
1007 unhandled_.push(added[i]);
1008}
1009
Dale Johannesen6513fc72008-09-19 18:52:31 +00001010/// findPhysReg - get the physical register, if any, assigned to this operand.
1011/// This may be an original physical register, or the physical register which
1012/// has been assigned to a virtual register.
1013unsigned RALinScan::findPhysReg(MachineOperand &MO) {
1014 unsigned PhysReg = MO.getReg();
1015 if (PhysReg && TargetRegisterInfo::isVirtualRegister(PhysReg)) {
1016 if (!vrm_->hasPhys(PhysReg))
1017 return 0;
1018 PhysReg = vrm_->getPhys(PhysReg);
1019 }
1020 return PhysReg;
1021}
1022
Dale Johannesenf9b08792008-09-19 01:02:35 +00001023/// noEarlyClobberConflict - see whether LiveInternal cur has a conflict with
1024/// hard reg HReg because of earlyclobbers.
1025///
1026/// Earlyclobber operands may not be assigned the same register as
1027/// each other, or as earlyclobber-conflict operands (i.e. those that
1028/// are non-earlyclobbered inputs to an asm that also has earlyclobbers).
1029///
1030/// Thus there are two cases to check for:
1031/// 1. cur->reg is an earlyclobber-conflict register and HReg is an
1032/// earlyclobber register in some asm that also has cur->reg as an input.
1033/// 2. cur->reg is an earlyclobber register and HReg is an
1034/// earlyclobber-conflict input, or a different earlyclobber register,
1035/// elsewhere in some asm.
1036/// In both cases HReg can be assigned by the user, or assigned early in
1037/// register allocation.
1038///
1039/// Dropping the distinction between earlyclobber and earlyclobber-conflict,
1040/// keeping only one bit, looks promising, but two earlyclobber-conflict
1041/// operands may be assigned the same register if they happen to contain the
1042/// same value, and that implementation would prevent this.
1043///
1044bool RALinScan::noEarlyClobberConflict(LiveInterval *cur, unsigned HReg) {
1045 if (cur->overlapsEarlyClobber) {
1046 for (MachineRegisterInfo::use_iterator I = mri_->use_begin(cur->reg),
1047 E = mri_->use_end(); I!=E; ++I) {
1048 MachineInstr *MI = I.getOperand().getParent();
1049 if (MI->getOpcode()==TargetInstrInfo::INLINEASM) {
1050 for (int i = MI->getNumOperands()-1; i>=0; --i) {
1051 MachineOperand &MO = MI->getOperand(i);
Dale Johannesen6513fc72008-09-19 18:52:31 +00001052 if (MO.isRegister() && MO.isEarlyClobber()) {
1053 unsigned PhysReg = findPhysReg(MO);
1054 if (HReg==PhysReg) {
1055 DOUT << " earlyclobber conflict: " <<
Dale Johannesenf9b08792008-09-19 01:02:35 +00001056 "%reg" << cur->reg << ", " << tri_->getName(HReg) << "\n\t";
Dale Johannesen6513fc72008-09-19 18:52:31 +00001057 return false;
1058 }
Dale Johannesenf9b08792008-09-19 01:02:35 +00001059 }
1060 }
1061 }
1062 }
1063 }
1064 if (cur->isEarlyClobber) {
1065 for (MachineRegisterInfo::def_iterator I = mri_->def_begin(cur->reg),
1066 E = mri_->def_end(); I!=E; ++I) {
1067 MachineInstr *MI = I.getOperand().getParent();
1068 if (MI->getOpcode()==TargetInstrInfo::INLINEASM) {
1069 // make sure cur->reg is really clobbered in this instruction.
1070 bool earlyClobberFound = false, overlapFound = false;
1071 for (int i = MI->getNumOperands()-1; i>=0; --i) {
1072 MachineOperand &MO = MI->getOperand(i);
Dale Johannesen6513fc72008-09-19 18:52:31 +00001073 if (MO.isRegister()) {
1074 if ((MO.overlapsEarlyClobber() || MO.isEarlyClobber())) {
1075 unsigned PhysReg = findPhysReg(MO);
1076 if (HReg==PhysReg)
1077 overlapFound = true;
1078 }
Dale Johannesenf9b08792008-09-19 01:02:35 +00001079 if (MO.isEarlyClobber() && cur->reg==MO.getReg())
1080 earlyClobberFound = true;
1081 }
1082 }
1083 if (earlyClobberFound && overlapFound) {
1084 DOUT << " earlyclobber conflict: " <<
1085 "%reg" << cur->reg << ", " << tri_->getName(HReg) << "\n\t";
1086 return false;
1087 }
1088 }
1089 }
1090 }
1091 return true;
1092}
1093
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001094/// getFreePhysReg - return a free physical register for this virtual register
1095/// interval if we have one, otherwise return 0.
1096unsigned RALinScan::getFreePhysReg(LiveInterval *cur) {
Chris Lattner9f6dc2c2008-02-26 22:08:41 +00001097 SmallVector<unsigned, 256> inactiveCounts;
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001098 unsigned MaxInactiveCount = 0;
1099
Evan Cheng06b74c52008-09-18 22:38:47 +00001100 const TargetRegisterClass *RC = mri_->getRegClass(cur->reg);
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001101 const TargetRegisterClass *RCLeader = RelatedRegClasses.getLeaderValue(RC);
1102
1103 for (IntervalPtrs::iterator i = inactive_.begin(), e = inactive_.end();
1104 i != e; ++i) {
1105 unsigned reg = i->first->reg;
Dan Gohman1e57df32008-02-10 18:45:23 +00001106 assert(TargetRegisterInfo::isVirtualRegister(reg) &&
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001107 "Can only allocate virtual registers!");
1108
1109 // If this is not in a related reg class to the register we're allocating,
1110 // don't check it.
Evan Cheng06b74c52008-09-18 22:38:47 +00001111 const TargetRegisterClass *RegRC = mri_->getRegClass(reg);
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001112 if (RelatedRegClasses.getLeaderValue(RegRC) == RCLeader) {
1113 reg = vrm_->getPhys(reg);
Chris Lattner9f6dc2c2008-02-26 22:08:41 +00001114 if (inactiveCounts.size() <= reg)
1115 inactiveCounts.resize(reg+1);
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001116 ++inactiveCounts[reg];
1117 MaxInactiveCount = std::max(MaxInactiveCount, inactiveCounts[reg]);
1118 }
1119 }
1120
1121 unsigned FreeReg = 0;
1122 unsigned FreeRegInactiveCount = 0;
1123
1124 // If copy coalescer has assigned a "preferred" register, check if it's
1125 // available first.
Anton Korobeynikov6a4a9332008-02-20 12:07:57 +00001126 if (cur->preference) {
Evan Chengad28af82008-09-09 20:22:01 +00001127 if (prt_->isRegAvail(cur->preference) && RC->contains(cur->preference)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001128 DOUT << "\t\tassigned the preferred register: "
Bill Wendling9b0baeb2008-02-26 21:47:57 +00001129 << tri_->getName(cur->preference) << "\n";
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001130 return cur->preference;
1131 } else
1132 DOUT << "\t\tunable to assign the preferred register: "
Bill Wendling9b0baeb2008-02-26 21:47:57 +00001133 << tri_->getName(cur->preference) << "\n";
Anton Korobeynikov6a4a9332008-02-20 12:07:57 +00001134 }
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001135
1136 // Scan for the first available register.
1137 TargetRegisterClass::iterator I = RC->allocation_order_begin(*mf_);
1138 TargetRegisterClass::iterator E = RC->allocation_order_end(*mf_);
Evan Chengaf091bd2008-03-24 23:28:21 +00001139 assert(I != E && "No allocatable register in this register class!");
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001140 for (; I != E; ++I)
Dale Johannesenbac3c812008-09-17 21:13:11 +00001141 if (prt_->isRegAvail(*I) &&
Dale Johannesenf9b08792008-09-19 01:02:35 +00001142 noEarlyClobberConflict(cur, *I)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001143 FreeReg = *I;
Chris Lattner9f6dc2c2008-02-26 22:08:41 +00001144 if (FreeReg < inactiveCounts.size())
1145 FreeRegInactiveCount = inactiveCounts[FreeReg];
1146 else
1147 FreeRegInactiveCount = 0;
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001148 break;
1149 }
Chris Lattner9f6dc2c2008-02-26 22:08:41 +00001150
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001151 // If there are no free regs, or if this reg has the max inactive count,
1152 // return this register.
1153 if (FreeReg == 0 || FreeRegInactiveCount == MaxInactiveCount) return FreeReg;
1154
1155 // Continue scanning the registers, looking for the one with the highest
1156 // inactive count. Alkis found that this reduced register pressure very
1157 // slightly on X86 (in rev 1.94 of this file), though this should probably be
1158 // reevaluated now.
1159 for (; I != E; ++I) {
1160 unsigned Reg = *I;
Chris Lattner9f6dc2c2008-02-26 22:08:41 +00001161 if (prt_->isRegAvail(Reg) && Reg < inactiveCounts.size() &&
Dale Johannesenbac3c812008-09-17 21:13:11 +00001162 FreeRegInactiveCount < inactiveCounts[Reg] &&
Dale Johannesenf9b08792008-09-19 01:02:35 +00001163 noEarlyClobberConflict(cur, *I)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +00001164 FreeReg = Reg;
1165 FreeRegInactiveCount = inactiveCounts[Reg];
1166 if (FreeRegInactiveCount == MaxInactiveCount)
1167 break; // We found the one with the max inactive count.
1168 }
1169 }
1170
1171 return FreeReg;
1172}
1173
1174FunctionPass* llvm::createLinearScanRegisterAllocator() {
1175 return new RALinScan();
1176}