//===-- RegAllocGreedy.cpp - greedy register allocator -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "InterferenceCache.h"
#include "LiveDebugVariables.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits, "Number of split local live ranges");
STATISTIC(NumEvicted, "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass,
                 public RegAllocBase,
                 private LiveRangeEdit::Delegate {

  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_New,    ///< Never seen before.
    RS_First,  ///< First time in the queue.
    RS_Second, ///< Second time in the queue.
    RS_Global, ///< Produced by global splitting.
    RS_Local,  ///< Produced by local splitting.
    RS_Spill   ///< Produced by spilling.
  };

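  /// LRStage - The current LiveRangeStage of each virtual register, indexed
  /// by virtual register number.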
  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (; Begin != End; ++Begin) {
      unsigned Reg = (*Begin)->reg;
      if (LRStage[Reg] == RS_New)
        LRStage[Reg] = NewStage;
    }
  }

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// Cached per-block interference maps
  InterferenceCache IntfCache;

  /// All basic blocks where the current register has uses.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    BitVector LiveBundles;
    SmallVector<unsigned, 8> ActiveBlocks;

    void reset(unsigned Reg) {
      PhysReg = Reg;
      LiveBundles.clear();
      ActiveBlocks.clear();
    }
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  void LRE_WillEraseInstruction(MachineInstr*);
  bool LRE_CanEraseVirtReg(unsigned);
  void LRE_WillShrinkVirtReg(unsigned);
  void LRE_DidCloneVirtReg(unsigned, unsigned);

  float calcSpillCost();
  bool addSplitConstraints(InterferenceCache::Cursor, float&);
  void addThroughConstraints(InterferenceCache::Cursor, ArrayRef<unsigned>);
  void growRegion(GlobalSplitCandidate &Cand, InterferenceCache::Cursor);
  float calcGlobalSplitCost(GlobalSplitCandidate&, InterferenceCache::Cursor);
  void splitAroundRegion(LiveInterval&, GlobalSplitCandidate&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryAssign(LiveInterval&, AllocationOrder&,
                     SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&, unsigned = ~0u);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

// Hysteresis to use when comparing floats.
// This helps stabilize decisions based on float comparisons.
const float Hysteresis = 0.98f;


FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_New) {
  initializeLiveDebugVariablesPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addPreserved<LiveDebugVariables>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}


//===----------------------------------------------------------------------===//
//                     LiveRangeEdit delegate methods
//===----------------------------------------------------------------------===//

void RAGreedy::LRE_WillEraseInstruction(MachineInstr *MI) {
  // LRE itself will remove from SlotIndexes and parent basic block.
  VRM->RemoveMachineInstrFromMaps(MI);
}

bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {
  if (unsigned PhysReg = VRM->getPhys(VirtReg)) {
    unassign(LIS->getInterval(VirtReg), PhysReg);
    return true;
  }
  // Unassigned virtreg is probably in the priority queue.
  // RegAllocBase will erase it after dequeueing.
  return false;
}

void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {
  unsigned PhysReg = VRM->getPhys(VirtReg);
  if (!PhysReg)
    return;

  // Register is assigned, put it back on the queue for reassignment.
  LiveInterval &LI = LIS->getInterval(VirtReg);
  unassign(LI, PhysReg);
  enqueue(&LI);
}

void RAGreedy::LRE_DidCloneVirtReg(unsigned New, unsigned Old) {
  // LRE may clone a virtual register because dead code elimination causes it to
  // be split into connected components. Ensure that the new register gets the
  // same stage as the parent.
  LRStage.grow(New);
  LRStage[New] = LRStage[Old];
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  GlobalCand.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_New)
    LRStage[Reg] = RS_First;

  if (LRStage[Reg] == RS_Second)
    // Unsplit ranges that couldn't be allocated immediately are deferred until
    // everything else has been allocated. Long ranges are allocated last so
    // they are split against realistic interference.
    Prio = (1u << 31) - Size;
  else {
    // Everything else is allocated in long->short order. Long ranges that don't
    // fit should be spilled ASAP so they don't create interference.
    Prio = (1u << 31) + Size;

    // Boost ranges that have a physical register hint.
    if (TargetRegisterInfo::isPhysicalRegister(VRM->getRegAllocPref(Reg)))
      Prio |= (1u << 30);
  }

  Queue.push(std::make_pair(Prio, Reg));
}

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}


//===----------------------------------------------------------------------===//
//                            Direct Assignment
//===----------------------------------------------------------------------===//

/// tryAssign - Try to assign VirtReg to an available register.
unsigned RAGreedy::tryAssign(LiveInterval &VirtReg,
                             AllocationOrder &Order,
                             SmallVectorImpl<LiveInterval*> &NewVRegs) {
  Order.rewind();
  unsigned PhysReg;
  while ((PhysReg = Order.next()))
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      break;
  if (!PhysReg || Order.isHint(PhysReg))
    return PhysReg;

  // PhysReg is available. Try to evict interference from a cheaper alternative.
  unsigned Cost = TRI->getCostPerUse(PhysReg);

  // Most registers have 0 additional cost.
  if (!Cost)
    return PhysReg;

  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is available at cost " << Cost
               << '\n');
  unsigned CheapReg = tryEvict(VirtReg, Order, NewVRegs, Cost);
  return CheapReg ? CheapReg : PhysReg;
}


//===----------------------------------------------------------------------===//
//                         Interference eviction
//===----------------------------------------------------------------------===//

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted.
/// Return false if any interference is heavier than MaxWeight.
/// On return, set MaxWeight to the maximal spill weight of an interference.
bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
                                    float &MaxWeight) {
  float Weight = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is heavier.
    if (Q.collectInterferingVRegs(10, MaxWeight) >= 10)
      return false;

    // Check if any interfering live range is heavier than MaxWeight.
    for (unsigned i = Q.interferingVRegs().size(); i; --i) {
      LiveInterval *Intf = Q.interferingVRegs()[i - 1];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= MaxWeight)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return        Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs,
                            unsigned CostPerUseLimit) {
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = VirtReg.weight;
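  // Starting from VirtReg's own weight means we never evict interference that
  // is at least as heavy as the register we are trying to allocate.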
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    if (TRI->getCostPerUse(PhysReg) >= CostPerUseLimit)
      continue;
    // The first use of a register in a function has cost 1.
    if (CostPerUseLimit == 1 && !MRI->isPhysRegUsed(PhysReg))
      continue;

    float Weight = BestWeight;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// addSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Physreg and its aliases. Add the constraints to
/// SpillPlacement and return the static cost of this split in Cost, assuming
/// that all preferences in SplitConstraints are met.
/// Return false if there are no bundles with positive bias.
bool RAGreedy::addSplitConstraints(InterferenceCache::Cursor Intf,
                                   float &Cost) {
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();

  // Reset interference dependent info.
  SplitConstraints.resize(UseBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];

    BC.Number = BI.MBB->getNumber();
    Intf.moveToBlock(BC.Number);
    BC.Entry = BI.LiveIn ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = BI.LiveOut ? SpillPlacement::PrefReg : SpillPlacement::DontCare;

    if (!Intf.hasInterference())
      continue;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
    if (BI.LiveIn) {
      if (Intf.first() <= Indexes->getMBBStartIdx(BC.Number))
        BC.Entry = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.first() < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.first() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
        ++Ins;
    }

    // Interference for the live-out value.
    if (BI.LiveOut) {
      if (Intf.last() >= SA->getLastSplitPoint(BC.Number))
        BC.Exit = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.last() > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.last() > (BI.LiveThrough ? BI.FirstUse : BI.Def))
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  Cost = StaticCost;

  // Add constraints for use-blocks. Note that these are the only constraints
  // that may add a positive bias, it is downhill from here.
  SpillPlacer->addConstraints(SplitConstraints);
  return SpillPlacer->scanActiveBundles();
}


/// addThroughConstraints - Add constraints and links to SpillPlacer from the
/// live-through blocks in Blocks.
void RAGreedy::addThroughConstraints(InterferenceCache::Cursor Intf,
                                     ArrayRef<unsigned> Blocks) {
  const unsigned GroupSize = 8;
  SpillPlacement::BlockConstraint BCS[GroupSize];
  unsigned TBS[GroupSize];
  unsigned B = 0, T = 0;
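  // BCS buffers constraints for blocks with interference; TBS buffers
  // interference-free blocks that only need links. Both are flushed to
  // SpillPlacer in groups of GroupSize.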

  for (unsigned i = 0; i != Blocks.size(); ++i) {
    unsigned Number = Blocks[i];
    Intf.moveToBlock(Number);

    if (!Intf.hasInterference()) {
      assert(T < GroupSize && "Array overflow");
      TBS[T] = Number;
      if (++T == GroupSize) {
        SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
        T = 0;
      }
      continue;
    }

    assert(B < GroupSize && "Array overflow");
    BCS[B].Number = Number;

    // Interference for the live-in value.
    if (Intf.first() <= Indexes->getMBBStartIdx(Number))
      BCS[B].Entry = SpillPlacement::MustSpill;
    else
      BCS[B].Entry = SpillPlacement::PrefSpill;

    // Interference for the live-out value.
    if (Intf.last() >= SA->getLastSplitPoint(Number))
      BCS[B].Exit = SpillPlacement::MustSpill;
    else
      BCS[B].Exit = SpillPlacement::PrefSpill;

    if (++B == GroupSize) {
      ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
      SpillPlacer->addConstraints(Array);
      B = 0;
    }
  }

  ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
  SpillPlacer->addConstraints(Array);
  SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
}

void RAGreedy::growRegion(GlobalSplitCandidate &Cand,
                          InterferenceCache::Cursor Intf) {
  // Keep track of through blocks that have not been added to SpillPlacer.
  BitVector Todo = SA->getThroughBlocks();
  SmallVectorImpl<unsigned> &ActiveBlocks = Cand.ActiveBlocks;
  unsigned AddedTo = 0;
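  // AddedTo counts how many entries of ActiveBlocks have already been passed
  // to addThroughConstraints; newly discovered blocks are flushed in batches
  // below.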
#ifndef NDEBUG
  unsigned Visited = 0;
#endif

  for (;;) {
    ArrayRef<unsigned> NewBundles = SpillPlacer->getRecentPositive();
    if (NewBundles.empty())
      break;
    // Find new through blocks in the periphery of PrefRegBundles.
    for (int i = 0, e = NewBundles.size(); i != e; ++i) {
      unsigned Bundle = NewBundles[i];
      // Look at all blocks connected to Bundle in the full graph.
      ArrayRef<unsigned> Blocks = Bundles->getBlocks(Bundle);
      for (ArrayRef<unsigned>::iterator I = Blocks.begin(), E = Blocks.end();
           I != E; ++I) {
        unsigned Block = *I;
        if (!Todo.test(Block))
          continue;
        Todo.reset(Block);
        // This is a new through block. Add it to SpillPlacer later.
        ActiveBlocks.push_back(Block);
#ifndef NDEBUG
        ++Visited;
#endif
      }
    }
    // Any new blocks to add?
    if (ActiveBlocks.size() > AddedTo) {
      ArrayRef<unsigned> Add(&ActiveBlocks[AddedTo],
                             ActiveBlocks.size() - AddedTo);
      addThroughConstraints(Intf, Add);
      AddedTo = ActiveBlocks.size();
    }
    // Perhaps iterating can enable more bundles?
    SpillPlacer->iterate();
  }
  DEBUG(dbgs() << ", v=" << Visited);
}

/// calcSpillCost - Compute how expensive it would be to split the live range in
/// SA around all use blocks instead of forming bundle regions.
float RAGreedy::calcSpillCost() {
  float Cost = 0;
  const LiveInterval &LI = SA->getParent();
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    unsigned Number = BI.MBB->getNumber();
    // We normally only need one spill instruction - a load or a store.
    Cost += SpillPlacer->getBlockFrequency(Number);

    // Unless the value is redefined in the block.
    if (BI.LiveIn && BI.LiveOut) {
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(Number);
      LiveInterval::const_iterator I = LI.find(Start);
      assert(I != LI.end() && "Expected live-in value");
      // Is there a different live-out value? If so, we need an extra spill
      // instruction.
      if (I->end < Stop)
        Cost += SpillPlacer->getBlockFrequency(Number);
    }
  }
  return Cost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(GlobalSplitCandidate &Cand,
                                    InterferenceCache::Cursor Intf) {
  float GlobalCost = 0;
  const BitVector &LiveBundles = Cand.LiveBundles;
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

    if (BI.LiveIn)
      Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
    if (BI.LiveOut)
      Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }

  for (unsigned i = 0, e = Cand.ActiveBlocks.size(); i != e; ++i) {
    unsigned Number = Cand.ActiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    if (!RegIn && !RegOut)
      continue;
    if (RegIn && RegOut) {
      // We need double spill code if this block has interference.
      Intf.moveToBlock(Number);
      if (Intf.hasInterference())
        GlobalCost += 2*SpillPlacer->getBlockFrequency(Number);
      continue;
    }
    // live-in / stack-out or stack-in live-out.
    GlobalCost += SpillPlacer->getBlockFrequency(Number);
  }
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg,
                                 GlobalSplitCandidate &Cand,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  const BitVector &LiveBundles = Cand.LiveBundles;

  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(Cand.PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  InterferenceCache::Cursor Intf(IntfCache, Cand.PhysReg);
  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  const unsigned MainIntv = SE->openIntv();
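  // Uses inside the chosen bundle region stay in MainIntv; isolated blocks
  // with multiple uses get separate intervals via splitSingleBlock below.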

  // First add all defs that are live out of a block.
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Create separate intervals for isolated blocks with multiple uses.
    if (!RegIn && !RegOut && BI.FirstUse != BI.LastUse) {
      DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " isolated.\n");
      SE->splitSingleBlock(BI);
      SE->selectIntv(MainIntv);
      continue;
    }

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ") intf [" << Intf.first() << ';' << Intf.last() << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!Intf.hasInterference() || Intf.first() < Stop)
           && "Bad interference");
    assert((!Intf.hasInterference() || Intf.last() > Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << Intf.last());

    if (!BI.LiveThrough && Intf.last() <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE->useIntv(BI.Def, Stop);
      continue;
    }

    SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
    if (Intf.last().getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.last().getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= Intf.last() && "Couldn't avoid interference");
        assert(SegStart < LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= Intf.last() && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber() << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ')');

    // Check interference entering the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(Start, SE->leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(LastSplitPoint);
        SE->useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << Intf.first());

    if (!BI.LiveThrough && Intf.first() >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE->useIntv(Start, BI.Kill);
      continue;
    }

    if (Intf.first().getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.first().getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
      SE->useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
  }

  // Handle live-through blocks.
  for (unsigned i = 0, e = Cand.ActiveBlocks.size(); i != e; ++i) {
    unsigned Number = Cand.ActiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    DEBUG(dbgs() << "Live through BB#" << Number << '\n');
    if (RegIn && RegOut) {
      Intf.moveToBlock(Number);
      if (!Intf.hasInterference()) {
        SE->useIntv(Indexes->getMBBStartIdx(Number),
                    Indexes->getMBBEndIdx(Number));
        continue;
      }
    }
    MachineBasicBlock *MBB = MF->getBlockNumbered(Number);
    if (RegIn)
      SE->leaveIntvAtTop(*MBB);
    if (RegOut)
      SE->enterIntvAtEnd(*MBB);
  }

  ++NumGlobalSplits;

  SmallVector<unsigned, 8> IntvMap;
  SE->finish(&IntvMap);
  LRStage.resize(MRI->getNumVirtRegs());
  unsigned OrigBlocks = SA->getNumThroughBlocks() + SA->getUseBlocks().size();

  // Sort out the new intervals created by splitting. We get four kinds:
  // - Remainder intervals should not be split again.
  // - Candidate intervals can be assigned to Cand.PhysReg.
  // - Block-local splits are candidates for local splitting.
  // - DCE leftovers should go back on the queue.
  for (unsigned i = 0, e = LREdit.size(); i != e; ++i) {
    unsigned Reg = LREdit.get(i)->reg;

    // Ignore old intervals from DCE.
    if (LRStage[Reg] != RS_New)
      continue;

    // Remainder interval. Don't try splitting again, spill if it doesn't
    // allocate.
    if (IntvMap[i] == 0) {
      LRStage[Reg] = RS_Global;
      continue;
    }

    // Main interval. Allow repeated splitting as long as the number of live
    // blocks is strictly decreasing.
    if (IntvMap[i] == MainIntv) {
      if (SA->countLiveBlocks(LREdit.get(i)) >= OrigBlocks) {
        DEBUG(dbgs() << "Main interval covers the same " << OrigBlocks
                     << " blocks as original.\n");
        // Don't allow repeated splitting as a safe guard against looping.
        LRStage[Reg] = RS_Global;
      }
      continue;
    }

    // Other intervals are treated as new. This includes local intervals created
    // for blocks with multiple uses, and anything created by DCE.
  }

  if (VerifyEnabled)
    MF->verify(this, "After splitting live range around region");
}
964
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000965unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
966 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +0000967 float BestCost = Hysteresis * calcSpillCost();
968 DEBUG(dbgs() << "Cost of isolating all blocks = " << BestCost << '\n');
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +0000969 const unsigned NoCand = ~0u;
970 unsigned BestCand = NoCand;
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +0000971
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000972 Order.rewind();
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +0000973 for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
974 if (GlobalCand.size() <= Cand)
975 GlobalCand.resize(Cand+1);
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +0000976 GlobalCand[Cand].reset(PhysReg);
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +0000977
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +0000978 SpillPlacer->prepare(GlobalCand[Cand].LiveBundles);
Jakob Stoklund Olesen1b400e82011-04-06 21:32:38 +0000979 float Cost;
Jakob Stoklund Olesenf4afdfc2011-04-09 02:59:09 +0000980 InterferenceCache::Cursor Intf(IntfCache, PhysReg);
981 if (!addSplitConstraints(Intf, Cost)) {
982 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tno positive bundles\n");
Jakob Stoklund Olesen1b400e82011-04-06 21:32:38 +0000983 continue;
984 }
Jakob Stoklund Olesenf4afdfc2011-04-09 02:59:09 +0000985 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tstatic = " << Cost);
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +0000986 if (Cost >= BestCost) {
987 DEBUG({
988 if (BestCand == NoCand)
989 dbgs() << " worse than no bundles\n";
990 else
991 dbgs() << " worse than "
992 << PrintReg(GlobalCand[BestCand].PhysReg, TRI) << '\n';
993 });
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000994 continue;
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +0000995 }
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +0000996 growRegion(GlobalCand[Cand], Intf);
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +0000997
Jakob Stoklund Olesen9efa2a22011-04-06 19:13:57 +0000998 SpillPlacer->finish();
999
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001000 // No live bundles; defer to splitSingleBlocks().
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001001 if (!GlobalCand[Cand].LiveBundles.any()) {
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001002 DEBUG(dbgs() << " no bundles.\n");
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001003 continue;
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001004 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001005
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001006 Cost += calcGlobalSplitCost(GlobalCand[Cand], Intf);
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001007 DEBUG({
1008 dbgs() << ", total = " << Cost << " with bundles";
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001009 for (int i = GlobalCand[Cand].LiveBundles.find_first(); i>=0;
1010 i = GlobalCand[Cand].LiveBundles.find_next(i))
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001011 dbgs() << " EB#" << i;
1012 dbgs() << ".\n";
1013 });
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001014 if (Cost < BestCost) {
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001015 BestCand = Cand;
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001016 BestCost = Hysteresis * Cost; // Prevent rounding effects.
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001017 }
1018 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001019
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001020 if (BestCand == NoCand)
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001021 return 0;
1022
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001023 splitAroundRegion(VirtReg, GlobalCand[BestCand], NewVRegs);
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001024 return 0;
1025}
1026
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001027
1028//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001029// Local Splitting
1030//===----------------------------------------------------------------------===//
1031
1032
1033/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
1034/// in order to use PhysReg between two entries in SA->UseSlots.
1035///
1036/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
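/// For example, three entries in UseSlots define two gaps, so GapWeight gets
/// two entries.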
1037///
1038void RAGreedy::calcGapWeights(unsigned PhysReg,
1039 SmallVectorImpl<float> &GapWeight) {
Jakob Stoklund Olesendb529a82011-04-06 03:57:00 +00001040 assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
1041 const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001042 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1043 const unsigned NumGaps = Uses.size()-1;
1044
1045 // Start and end points for the interference check.
1046 SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
1047 SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;
1048
1049 GapWeight.assign(NumGaps, 0.0f);
1050
1051 // Add interference from each overlapping register.
1052 for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
1053 if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
1054 .checkInterference())
1055 continue;
1056
1057 // We know that VirtReg is a continuous interval from FirstUse to LastUse,
1058 // so we don't need InterferenceQuery.
1059 //
1060 // Interference that overlaps an instruction is counted in both gaps
1061 // surrounding the instruction. The exception is interference before
1062 // StartIdx and after StopIdx.
1063 //
1064 LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
1065 for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
1066 // Skip the gaps before IntI.
1067 while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
1068 if (++Gap == NumGaps)
1069 break;
1070 if (Gap == NumGaps)
1071 break;
1072
1073 // Update the gaps covered by IntI.
1074 const float weight = IntI.value()->weight;
1075 for (; Gap != NumGaps; ++Gap) {
1076 GapWeight[Gap] = std::max(GapWeight[Gap], weight);
1077 if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
1078 break;
1079 }
1080 if (Gap == NumGaps)
1081 break;
1082 }
1083 }
1084}
1085
1086/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
1087/// before MI that has a slot index. If MI is the first mapped instruction in
1088/// its block, return the block start index instead.
1089///
1090SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
1091 assert(MI && "Missing MachineInstr");
1092 const MachineBasicBlock *MBB = MI->getParent();
1093 MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
1094 while (I != B)
1095 if (!(--I)->isDebugValue() && !I->isCopy())
1096 return Indexes->getInstructionIndex(I);
1097 return Indexes->getMBBStartIdx(MBB);
1098}
1099
1100/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
1101/// real non-copy instruction for each instruction in SA->UseSlots.
1102///
1103void RAGreedy::calcPrevSlots() {
1104 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1105 PrevSlot.clear();
1106 PrevSlot.reserve(Uses.size());
1107 for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
1108 const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
1109 PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
1110 }
1111}
1112
1113/// nextSplitPoint - Find the next index j > i into SA->UseSlots such that it
1114/// may be beneficial to split before UseSlots[j].
1115///
1116/// 0 is always a valid split point
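/// For example, if UseSlots[1] is adjacent to UseSlots[0] but UseSlots[2] is
/// not adjacent to UseSlots[1], nextSplitPoint(0) skips index 1 and returns 2.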
1117unsigned RAGreedy::nextSplitPoint(unsigned i) {
1118 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1119 const unsigned Size = Uses.size();
1120 assert(i != Size && "No split points after the end");
1121 // Allow split before i when Uses[i] is not adjacent to the previous use.
1122 while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
1123 ;
1124 return i;
1125}
1126
1127/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
1128/// basic block.
1129///
1130unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1131 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesendb529a82011-04-06 03:57:00 +00001132 assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
1133 const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001134
1135 // Note that it is possible to have an interval that is live-in or live-out
1136 // while only covering a single block: a phi-def can use undef values from
1137 // predecessors, and the block could be a single-block loop.
1138 // We don't bother doing anything clever about such a case; we simply assume
1139 // that the interval is continuous from FirstUse to LastUse. We should make
1140 // sure that we don't do anything illegal to such an interval, though.
1141
1142 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1143 if (Uses.size() <= 2)
1144 return 0;
1145 const unsigned NumGaps = Uses.size()-1;
1146
1147 DEBUG({
1148 dbgs() << "tryLocalSplit: ";
1149 for (unsigned i = 0, e = Uses.size(); i != e; ++i)
1150 dbgs() << ' ' << SA->UseSlots[i];
1151 dbgs() << '\n';
1152 });
1153
1154 // For every use, find the previous mapped non-copy instruction.
1155 // We use this to detect valid split points, and to estimate new interval
1156 // sizes.
1157 calcPrevSlots();
1158
1159 unsigned BestBefore = NumGaps;
1160 unsigned BestAfter = 0;
1161 float BestDiff = 0;
1162
Jakob Stoklund Olesen40a42a22011-03-04 00:58:40 +00001163 const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001164 SmallVector<float, 8> GapWeight;
1165
1166 Order.rewind();
1167 while (unsigned PhysReg = Order.next()) {
1168 // Keep track of the largest spill weight that would need to be evicted in
1169 // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
1170 calcGapWeights(PhysReg, GapWeight);
1171
1172 // Try to find the best sequence of gaps to close.
1173 // The new spill weight must be larger than any gap interference.
1174
1175 // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
1176 unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;
1177
1178 // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
1179 // It is the spill weight that needs to be evicted.
1180 float MaxGap = GapWeight[0];
1181 for (unsigned i = 1; i != SplitAfter; ++i)
1182 MaxGap = std::max(MaxGap, GapWeight[i]);
1183
1184 for (;;) {
1185 // Live before/after split?
1186 const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
1187 const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;
1188
1189 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
1190 << Uses[SplitBefore] << '-' << Uses[SplitAfter]
1191 << " i=" << MaxGap);
1192
1193 // Stop before the interval gets so big we wouldn't be making progress.
1194 if (!LiveBefore && !LiveAfter) {
1195 DEBUG(dbgs() << " all\n");
1196 break;
1197 }
1198 // Should the interval be extended or shrunk?
1199 bool Shrink = true;
1200 if (MaxGap < HUGE_VALF) {
1201 // Estimate the new spill weight.
1202 //
1203 // Each instruction reads and writes the register, except the first
1204 // instr doesn't read when !LiveBefore, and the last instr doesn't write
1205 // when !LiveAfter.
1206 //
1207 // We will be inserting copies before and after, so the total number of
1208 // reads and writes is 2 * EstUses.
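        // For example, a window with SplitAfter - SplitBefore == 2 that is
        // live both before and after works out to EstUses = 2*2 + 2*2 == 8.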
1209 //
1210 const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
1211 2*(LiveBefore + LiveAfter);
1212
1213 // Try to guess the size of the new interval. This should be trivial,
1214 // but the slot index of an inserted copy can be a lot smaller than the
1215 // instruction it is inserted before if there are many dead indexes
1216 // between them.
1217 //
1218 // We measure the distance from the instruction before SplitBefore to
1219 // get a conservative estimate.
1220 //
1221 // The final distance can still be different if inserting copies
1222 // triggers a slot index renumbering.
1223 //
1224 const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
1225 PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
1226 // Would this split be possible to allocate?
1227 // Never allocate all gaps; we wouldn't be making progress.
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001228 DEBUG(dbgs() << " w=" << EstWeight);
1229 if (EstWeight * Hysteresis >= MaxGap) {
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001230 Shrink = false;
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001231 float Diff = EstWeight - MaxGap;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001232 if (Diff > BestDiff) {
1233 DEBUG(dbgs() << " (best)");
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001234 BestDiff = Hysteresis * Diff;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001235 BestBefore = SplitBefore;
1236 BestAfter = SplitAfter;
1237 }
1238 }
1239 }
1240
1241 // Try to shrink.
1242 if (Shrink) {
1243 SplitBefore = nextSplitPoint(SplitBefore);
1244 if (SplitBefore < SplitAfter) {
1245 DEBUG(dbgs() << " shrink\n");
1246 // Recompute the max when necessary.
1247 if (GapWeight[SplitBefore - 1] >= MaxGap) {
1248 MaxGap = GapWeight[SplitBefore];
1249 for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
1250 MaxGap = std::max(MaxGap, GapWeight[i]);
1251 }
1252 continue;
1253 }
1254 MaxGap = 0;
1255 }
1256
1257 // Try to extend the interval.
1258 if (SplitAfter >= NumGaps) {
1259 DEBUG(dbgs() << " end\n");
1260 break;
1261 }
1262
1263 DEBUG(dbgs() << " extend\n");
1264 for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
1265 SplitAfter != e; ++SplitAfter)
1266 MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
1267 continue;
1268 }
1269 }
1270
1271 // Didn't find any candidates?
1272 if (BestBefore == NumGaps)
1273 return 0;
1274
1275 DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
1276 << '-' << Uses[BestAfter] << ", " << BestDiff
1277 << ", " << (BestAfter - BestBefore + 1) << " instrs\n");
1278
Jakob Stoklund Olesen92a55f42011-03-09 00:57:29 +00001279 LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001280 SE->reset(LREdit);
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001281
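  // Split off the selected range: the new interval enters just before
  // Uses[BestBefore] and leaves again just after Uses[BestAfter].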
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001282 SE->openIntv();
1283 SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
1284 SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
1285 SE->useIntv(SegStart, SegStop);
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001286 SE->finish();
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001287 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +00001288 ++NumLocalSplits;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001289
1290 return 0;
1291}
1292
1293//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001294// Live Range Splitting
1295//===----------------------------------------------------------------------===//
1296
1297/// trySplit - Try to split VirtReg or one of its interferences, making it
1298/// assignable.
1299/// @return PhysReg when VirtReg may be assigned, and/or new intervals in NewVRegs.
1300unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
1301 SmallVectorImpl<LiveInterval*>&NewVRegs) {
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001302 // Local intervals are handled separately.
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001303 if (LIS->intervalIsInOneMBB(VirtReg)) {
1304 NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001305 SA->analyze(&VirtReg);
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001306 return tryLocalSplit(VirtReg, Order, NewVRegs);
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001307 }
1308
1309 NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001310
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001311 // Don't iterate global splitting.
1312 // Move straight to spilling if this range was produced by a global split.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001313 if (getStage(VirtReg) >= RS_Global)
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001314 return 0;
1315
1316 SA->analyze(&VirtReg);
1317
Jakob Stoklund Olesen7d6b6a02011-05-03 20:42:13 +00001318 // FIXME: SplitAnalysis may repair broken live ranges coming from the
1319 // coalescer. That may cause the range to become allocatable, which means that
1320 // tryRegionSplit won't be making progress. This check should be replaced with
1321 // an assertion when the coalescer is fixed.
1322 if (SA->didRepairRange()) {
1323 // VirtReg has changed, so all cached queries are invalid.
1324 Order.rewind();
1325 while (unsigned PhysReg = Order.next())
1326 query(VirtReg, PhysReg).clear();
1327 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1328 return PhysReg;
1329 }
1330
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001331 // First try to split around a region spanning multiple blocks.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001332 unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
1333 if (PhysReg || !NewVRegs.empty())
1334 return PhysReg;
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001335
1336 // Then isolate blocks with multiple uses.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001337 SplitAnalysis::BlockPtrSet Blocks;
1338 if (SA->getMultiUseBlocks(Blocks)) {
1339 LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
1340 SE->reset(LREdit);
1341 SE->splitSingleBlocks(Blocks);
1342 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Global);
1343 if (VerifyEnabled)
1344 MF->verify(this, "After splitting live range around basic blocks");
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001345 }
1346
1347 // Don't assign any physregs.
1348 return 0;
1349}
1350
1351
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001352//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001353// Main Entry Point
1354//===----------------------------------------------------------------------===//
1355
1356unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001357 SmallVectorImpl<LiveInterval*> &NewVRegs) {
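  // Allocation proceeds in stages: try a free register, then eviction, then
  // (from the second round on) live range splitting, and finally spilling.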
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001358 // First try assigning a free register.
Jakob Stoklund Olesendd479e92010-12-10 22:21:05 +00001359 AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
Jakob Stoklund Olesen6bfba2e2011-04-20 18:19:48 +00001360 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1361 return PhysReg;
Andrew Trickb853e6c2010-12-09 18:15:21 +00001362
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +00001363 if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +00001364 return PhysReg;
Andrew Trickb853e6c2010-12-09 18:15:21 +00001365
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001366 assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");
1367
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001368 // The first time we see a live range, don't try to split or spill.
1369 // Wait until the second time, when all smaller ranges have been allocated.
1370 // This gives a better picture of the interference to split around.
Jakob Stoklund Oleseneb291572011-03-27 22:49:21 +00001371 LiveRangeStage Stage = getStage(VirtReg);
Jakob Stoklund Olesenf22ca3f2011-03-30 02:52:39 +00001372 if (Stage == RS_First) {
Jakob Stoklund Oleseneb291572011-03-27 22:49:21 +00001373 LRStage[VirtReg.reg] = RS_Second;
Jakob Stoklund Olesenc1655e12011-03-19 23:02:47 +00001374 DEBUG(dbgs() << "wait for second round\n");
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001375 NewVRegs.push_back(&VirtReg);
1376 return 0;
1377 }
1378
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001379 assert(Stage < RS_Spill && "Cannot allocate after spilling");
1380
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +00001381 // Try splitting VirtReg or interferences.
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001382 unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
1383 if (PhysReg || !NewVRegs.empty())
Jakob Stoklund Olesenb64d92e2010-12-14 00:37:44 +00001384 return PhysReg;
1385
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001386 // Finally spill VirtReg itself.
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001387 NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen47dbf6c2011-03-10 01:51:42 +00001388 LiveRangeEdit LRE(VirtReg, NewVRegs, this);
1389 spiller().spill(LRE);
Jakob Stoklund Olesen6094bd82011-03-29 21:20:19 +00001390 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Spill);
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001391
Jakob Stoklund Olesenc46570d2011-03-16 22:56:08 +00001392 if (VerifyEnabled)
1393 MF->verify(this, "After spilling");
1394
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001395 // The live virtual register requesting allocation was spilled, so tell
1396 // the caller not to allocate anything during this round.
1397 return 0;
1398}
1399
1400bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
1401 DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
1402 << "********** Function: "
1403 << ((Value*)mf.getFunction())->getName() << '\n');
1404
1405 MF = &mf;
Jakob Stoklund Olesenaf249642010-12-17 23:16:35 +00001406 if (VerifyEnabled)
Jakob Stoklund Olesen89cab932010-12-18 00:06:56 +00001407 MF->verify(this, "Before greedy register allocator");
Jakob Stoklund Olesenaf249642010-12-17 23:16:35 +00001408
Jakob Stoklund Olesen4680dec2010-12-10 23:49:00 +00001409 RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001410 Indexes = &getAnalysis<SlotIndexes>();
Jakob Stoklund Olesenf428eb62010-12-17 23:16:32 +00001411 DomTree = &getAnalysis<MachineDominatorTree>();
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001412 ReservedRegs = TRI->getReservedRegs(*MF);
Jakob Stoklund Olesenf6dff842010-12-10 22:54:44 +00001413 SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +00001414 Loops = &getAnalysis<MachineLoopInfo>();
1415 LoopRanges = &getAnalysis<MachineLoopRanges>();
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001416 Bundles = &getAnalysis<EdgeBundles>();
1417 SpillPlacer = &getAnalysis<SpillPlacement>();
1418
Jakob Stoklund Olesen1b847de2011-02-19 00:53:42 +00001419 SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001420 SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001421 LRStage.clear();
1422 LRStage.resize(MRI->getNumVirtRegs());
Jakob Stoklund Oleseneda0fe82011-04-02 06:03:38 +00001423 IntfCache.init(MF, &PhysReg2LiveUnion[0], Indexes, TRI);
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +00001424
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001425 allocatePhysRegs();
1426 addMBBLiveIns(MF);
Jakob Stoklund Olesen8a61da82011-02-08 21:13:03 +00001427 LIS->addKillFlags();
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001428
1429 // Run rewriter
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001430 {
1431 NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesenba05c012011-02-18 22:03:18 +00001432 VRM->rewrite(Indexes);
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001433 }
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001434
Jakob Stoklund Olesencfafc542011-04-05 21:40:37 +00001435 // Write out new DBG_VALUE instructions.
1436 getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);
1437
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001438 // The pass output is in VirtRegMap. Release all the transient data.
1439 releaseMemory();
1440
1441 return true;
1442}