//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumReassigned,   "Number of interferences reassigned");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::auto_ptr<SplitAnalysis> SA;
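  /// Live ranges waiting to be assigned, as (priority, vreg) pairs produced by
  /// enqueue(). The priority is currently the range size, with a high bit set
  /// for ranges that carry a physreg hint.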
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // splitting state.

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, unsigned, float&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&,
                       SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID) {
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
  unsigned Size = LI->getSize();
  unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");

  // Boost ranges that have a physical register hint.
  unsigned Hint = VRM->getRegAllocPref(Reg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint))
    Size |= (1u << 30);
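  // With bit 30 set, hinted ranges compare greater than any ordinary size, so
  // the max-heap Queue pops them first and they get a chance at their
  // preferred physreg before it is taken.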

  Queue.push(std::make_pair(Size, Reg));
}

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}

//===----------------------------------------------------------------------===//
//                         Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      Q.collectInterferingVRegs(1);
      if (!Q.seenAllInterferences())
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    unassign(InterferingVReg, OldAssign);
    assign(InterferingVReg, PhysReg);
    ++NumReassigned;
    return true;
  }
  return false;
}

/// tryReassign - Try to reassign a single interference to a different physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order,
                               SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
    if (!InterferingVReg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
      continue;
    if (reassignVReg(*InterferingVReg, PhysReg))
      return PhysReg;
  }
  return 0;
}


//===----------------------------------------------------------------------===//
//                         Interference eviction
//===----------------------------------------------------------------------===//
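//
// Eviction unassigns the virtual registers currently blocking a physreg and
// pushes them onto NewVRegs to be allocated again, freeing the physreg for
// VirtReg. It is only attempted when every interference is itself virtual,
// longer than VirtReg, and (as a heuristic cutoff) fewer than ten per alias.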
314
315/// canEvict - Return true if all interferences between VirtReg and PhysReg can
316/// be evicted. Set maxWeight to the maximal spill weight of an interference.
317bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
318 unsigned Size, float &MaxWeight) {
319 float Weight = 0;
320 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
321 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
322 // If there is 10 or more interferences, chances are one is smaller.
323 if (Q.collectInterferingVRegs(10) >= 10)
324 return false;
325
326 // CHeck if any interfering live range is shorter than VirtReg.
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->getSize() <= Size)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // We can only evict interference if all interfering registers are virtual
  // and longer than VirtReg.
  const unsigned Size = VirtReg.getSize();

  // Keep track of the lightest single interference seen so far.
  float BestWeight = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = 0;
    if (!canEvictInterference(VirtReg, PhysReg, Size, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// calcInterferenceInfo - Compute per-block outgoing and ingoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  SpillConstraints.resize(SA->LiveBlocks.size());
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Number = BI.MBB->getNumber();
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }
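  // At this point a block prefers a register at entry/exit only when the range
  // is live across that boundary and the block has uses; the scans below
  // demote these preferences to PrefSpill or MustSpill wherever PhysReg's
  // interference would make a register unusable.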

  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

    // Determine which blocks have interference live in or after the last split
    // point.
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Is the interference live-in?
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
      }

      // Is the interference overlapping the last split point?
      if (BI.LiveOut) {
        if (IntI.stop() < BI.LastSplitPoint)
          IntI.advanceTo(BI.LastSplitPoint.getPrevSlot());
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }

    // Rewind iterator and check other interferences.
    IntI.find(VirtReg.beginIndex());
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;

        if (BC.Entry != SpillPlacement::MustSpill)
          BC.Entry = SpillPlacement::PrefSpill;
        if (BC.Exit != SpillPlacement::MustSpill)
          BC.Exit = SpillPlacement::PrefSpill;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        // Not live in, but before the first use.
        if (IntI.start() < BI.FirstUse) {
          BC.Entry = SpillPlacement::PrefSpill;
          // If the block contains a kill from an earlier split, never split
          // again in the same block.
          if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Kill))
            BC.Entry = SpillPlacement::MustSpill;
        }
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop) {
            BC.Exit = SpillPlacement::PrefSpill;
            // Avoid splitting twice in the same block.
            if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Def))
              BC.Exit = SpillPlacement::MustSpill;
          }
        }
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
               << LocalCost << '\n');
  return LocalCost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SpillConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
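  // Each block boundary where the placed bundle disagrees with the block's own
  // preference costs a spill or reload, so charge the block frequency once per
  // mismatched side.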
  for (unsigned i = 0, e = SpillConstraints.size(); i != e; ++i) {
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;
    // Broken entry preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
                 (BC.Entry == SpillPlacement::PrefReg);
    // Broken exit preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
                 (BC.Exit == SpillPlacement::PrefReg);
    if (Inserts)
      GlobalCost +=
        Inserts * SpillPlacer->getBlockFrequency(SA->LiveBlocks[i].MBB);
  }
  DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(SA->LiveBlocks.size());
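  // For block i, InterferenceRanges[i].first is the start of the first
  // interfering segment (tracked for live-in blocks) and .second is the end
  // of the last one (tracked for live-out blocks). An invalid index means no
  // interference was found on that side of the block.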
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (IntI.stop() <= Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > Start) && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE.useIntv(SE.enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.useIntv(SE.enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE.useIntv(BI.Def, Stop);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE.enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE.useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, SE.leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE.useIntv(Start, SE.leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point; it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE.leaveIntvBefore(BI.LastSplitPoint);
        SE.useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE.overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE.useIntv(Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE.leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE.useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();
  ++NumGlobalSplits;

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
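  // For each candidate physreg, the total cost is the local interference cost
  // plus the global cost of the spill placement it forces. Keep the cheapest
  // candidate and split around the region described by its live bundles.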
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Cost = calcInterferenceInfo(VirtReg, PhysReg);
    if (BestReg && Cost >= BestCost)
      continue;

    SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any())
      continue;

    Cost += calcGlobalSplitCost(LiveBundles);
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Local Splitting
//===----------------------------------------------------------------------===//
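//
// Local splitting handles live ranges that are confined to a single basic
// block. It looks for a run of uses that can realistically be given a
// register: the estimated spill weight of the new, smaller interval must beat
// the interference weight it would have to evict.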


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point.
unsigned RAGreedy::nextSplitPoint(unsigned i) {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned Size = Uses.size();
  assert(i != Size && "No split points after the end");
  // Allow split before i when Uses[i] is not adjacent to the previous use.
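  // A use counts as adjacent when no real (non-copy) instruction separates it
  // from the previous use, i.e. PrevSlot[i] is at or before Uses[i-1];
  // splitting in such a gap would only place a copy right next to a use.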
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}

/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
/// basic block.
///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - a phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case; we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;
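  // BestBefore/BestAfter delimit the most promising use window found so far,
  // and BestDiff is how far its estimated spill weight exceeds the
  // interference it would have to evict.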

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB);
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !LiveBefore, and the last instr doesn't
        // write when !LiveAfter.
        //
        // We will be inserting copies before and after, so the total number of
        // reads and writes is 2 * EstUses.
        //
        const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
                                 2*(LiveBefore + LiveAfter);

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than the
        // instruction it is inserted before if there are many dead indexes
        // between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                              PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps; we wouldn't be making progress.
1120 float Diff = EstWeight - MaxGap;
1121 DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
1122 if (Diff > 0) {
1123 Shrink = false;
1124 if (Diff > BestDiff) {
1125 DEBUG(dbgs() << " (best)");
1126 BestDiff = Diff;
1127 BestBefore = SplitBefore;
1128 BestAfter = SplitAfter;
1129 }
1130 }
1131 }
1132
1133 // Try to shrink.
1134 if (Shrink) {
1135 SplitBefore = nextSplitPoint(SplitBefore);
1136 if (SplitBefore < SplitAfter) {
1137 DEBUG(dbgs() << " shrink\n");
1138 // Recompute the max when necessary.
1139 if (GapWeight[SplitBefore - 1] >= MaxGap) {
1140 MaxGap = GapWeight[SplitBefore];
1141 for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
1142 MaxGap = std::max(MaxGap, GapWeight[i]);
1143 }
1144 continue;
1145 }
1146 MaxGap = 0;
1147 }
1148
1149 // Try to extend the interval.
1150 if (SplitAfter >= NumGaps) {
1151 DEBUG(dbgs() << " end\n");
1152 break;
1153 }
1154
1155 DEBUG(dbgs() << " extend\n");
1156 for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
1157 SplitAfter != e; ++SplitAfter)
1158 MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
1159 continue;
1160 }
1161 }
1162
1163 // Didn't find any candidates?
1164 if (BestBefore == NumGaps)
1165 return 0;
1166
1167 DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
1168 << '-' << Uses[BestAfter] << ", " << BestDiff
1169 << ", " << (BestAfter - BestBefore + 1) << " instrs\n");
1170
1171 SmallVector<LiveInterval*, 4> SpillRegs;
1172 LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
1173 SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);
1174
1175 SE.openIntv();
1176 SlotIndex SegStart = SE.enterIntvBefore(Uses[BestBefore]);
1177 SlotIndex SegStop = SE.leaveIntvAfter(Uses[BestAfter]);
1178 SE.useIntv(SegStart, SegStop);
1179 SE.closeIntv();
1180 SE.finish();
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +00001181 ++NumLocalSplits;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001182
1183 return 0;
1184}
1185
1186//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001187// Live Range Splitting
1188//===----------------------------------------------------------------------===//
1189
1190/// trySplit - Try to split VirtReg or one of its interferences, making it
1191/// assignable.
1192/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
1193unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
1194 SmallVectorImpl<LiveInterval*>&NewVRegs) {
  SA->analyze(&VirtReg);

  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // First try to split around a region spanning multiple blocks.
  unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Then isolate blocks with multiple uses.
  SplitAnalysis::BlockPtrSet Blocks;
  if (SA->getMultiUseBlocks(Blocks)) {
    SmallVector<LiveInterval*, 4> SpillRegs;
    LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
    SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
    if (VerifyEnabled)
      MF->verify(this, "After splitting live range around basic blocks");
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                                 Spilling
//===----------------------------------------------------------------------===//

/// calcInterferenceWeight - Calculate the combined spill weight of
/// interferences when assigning VirtReg to PhysReg.
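/// For example, if PhysReg and its aliases currently hold interfering virtual
/// registers with spill weights 1.0 and 2.5, the result is 3.5; a VirtReg
/// with weight 5.0 could then profitably displace them via
/// trySpillInterferences below.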
float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg) {
  float Sum = 0;
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Q.collectInterferingVRegs();
    if (Q.seenUnspillableVReg())
      return HUGE_VALF;
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
      Sum += Q.interferingVRegs()[i]->weight;
  }
  return Sum;
}

/// trySpillInterferences - Try to spill interfering registers instead of the
/// current one. Only do it if the accumulated spill weight is smaller than
/// VirtReg's own spill weight.
unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
                                         AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
  unsigned BestPhys = 0;
  float BestWeight = 0;

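  // Look for the cheapest physreg to clear. Unspillable interference
  // (HUGE_VALF) disqualifies a candidate, and the combined weight must be
  // strictly smaller than VirtReg's own weight to be worth it.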
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = calcInterferenceWeight(VirtReg, PhysReg);
    if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
      continue;
    if (!BestPhys || Weight < BestWeight)
      BestPhys = PhysReg, BestWeight = Weight;
  }

  // No candidates found.
  if (!BestPhys)
    return 0;

  // Collect all interfering registers.
  SmallVector<LiveInterval*, 8> Spills;
  for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *VReg = Q.interferingVRegs()[i];
      unassign(*VReg, *AI);
    }
  }

  // Spill them all.
  DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
               << BestWeight << '\n');
  for (unsigned i = 0, e = Spills.size(); i != e; ++i)
    spiller().spill(Spills[i], NewVRegs, Spills);
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

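// selectOrSplit tries the cheapest strategies first: a free register, then
// reassigning or evicting interference, then splitting, then spilling
// interferences, and finally spilling VirtReg itself.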
unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryReassign(VirtReg, Order, NewVRegs))
    return PhysReg;

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Try to spill interfering registers with less total spill weight instead.
  PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
  if (PhysReg)
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));

  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}