//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "InterferenceCache.h"
#include "LiveDebugVariables.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass,
                 public RegAllocBase,
                 private LiveRangeEdit::Delegate {

  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;
  LiveDebugVariables *DebugVars;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
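  // (Priority, VirtReg number) pairs; the pair with the largest priority is
  // dequeued first. See enqueue() for how priorities are computed.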
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_New,      ///< Never seen before.
    RS_First,    ///< First time in the queue.
    RS_Second,   ///< Second time in the queue.
    RS_Global,   ///< Produced by global splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };

  static const char *const StageName[];

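  /// LRStage - The current LiveRangeStage of each virtual register, indexed by
  /// virtual register number.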
  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin) {
      unsigned Reg = (*Begin)->reg;
      if (LRStage[Reg] == RS_New)
        LRStage[Reg] = NewStage;
    }
  }

  // Eviction. Sometimes an assigned live range can be evicted without
  // conditions, but other times it must be split after being evicted to avoid
  // infinite loops.
  enum CanEvict {
    CE_Never,    ///< Can never evict.
    CE_Always,   ///< Can always evict.
    CE_WithSplit ///< Can evict only if range is also split or spilled.
  };

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// Cached per-block interference maps
  InterferenceCache IntfCache;

  /// All basic blocks where the current register has uses.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    BitVector LiveBundles;
    SmallVector<unsigned, 8> ActiveBlocks;

    void reset(unsigned Reg) {
      PhysReg = Reg;
      LiveBundles.clear();
      ActiveBlocks.clear();
    }
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  void LRE_WillEraseInstruction(MachineInstr*);
  bool LRE_CanEraseVirtReg(unsigned);
  void LRE_WillShrinkVirtReg(unsigned);
  void LRE_DidCloneVirtReg(unsigned, unsigned);

  float calcSpillCost();
  bool addSplitConstraints(InterferenceCache::Cursor, float&);
  void addThroughConstraints(InterferenceCache::Cursor, ArrayRef<unsigned>);
  void growRegion(GlobalSplitCandidate &Cand, InterferenceCache::Cursor);
  float calcGlobalSplitCost(GlobalSplitCandidate&, InterferenceCache::Cursor);
  void splitAroundRegion(LiveInterval&, GlobalSplitCandidate&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  CanEvict canEvict(LiveInterval &A, LiveInterval &B);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryAssign(LiveInterval&, AllocationOrder&,
                     SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&, unsigned = ~0u);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

#ifndef NDEBUG
const char *const RAGreedy::StageName[] = {
  "RS_New",
  "RS_First",
  "RS_Second",
  "RS_Global",
  "RS_Local",
  "RS_Spill"
};
#endif

// Hysteresis to use when comparing floats.
// This helps stabilize decisions based on float comparisons.
const float Hysteresis = 0.98f;
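// For example, tryRegionSplit() only pursues a region split whose cost beats
// Hysteresis * (the cost of spilling around every use block).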


FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_New) {
  initializeLiveDebugVariablesPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addPreserved<LiveDebugVariables>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}


//===----------------------------------------------------------------------===//
//                     LiveRangeEdit delegate methods
//===----------------------------------------------------------------------===//

void RAGreedy::LRE_WillEraseInstruction(MachineInstr *MI) {
  // LRE itself will remove from SlotIndexes and parent basic block.
  VRM->RemoveMachineInstrFromMaps(MI);
}

bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {
  if (unsigned PhysReg = VRM->getPhys(VirtReg)) {
    unassign(LIS->getInterval(VirtReg), PhysReg);
    return true;
  }
  // Unassigned virtreg is probably in the priority queue.
  // RegAllocBase will erase it after dequeueing.
  return false;
}

void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {
  unsigned PhysReg = VRM->getPhys(VirtReg);
  if (!PhysReg)
    return;

  // Register is assigned, put it back on the queue for reassignment.
  LiveInterval &LI = LIS->getInterval(VirtReg);
  unassign(LI, PhysReg);
  enqueue(&LI);
}

void RAGreedy::LRE_DidCloneVirtReg(unsigned New, unsigned Old) {
  // LRE may clone a virtual register because dead code elimination causes it to
  // be split into connected components. Ensure that the new register gets the
  // same stage as the parent.
  LRStage.grow(New);
  LRStage[New] = LRStage[Old];
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  GlobalCand.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_New)
    LRStage[Reg] = RS_First;

  if (LRStage[Reg] == RS_Second)
    // Unsplit ranges that couldn't be allocated immediately are deferred until
    // everything else has been allocated. Long ranges are allocated last so
    // they are split against realistic interference.
    Prio = (1u << 31) - Size;
  else {
    // Everything else is allocated in long->short order. Long ranges that don't
    // fit should be spilled ASAP so they don't create interference.
    Prio = (1u << 31) + Size;

    // Boost ranges that have a physical register hint.
    if (TargetRegisterInfo::isPhysicalRegister(VRM->getRegAllocPref(Reg)))
      Prio |= (1u << 30);
  }

  Queue.push(std::make_pair(Prio, Reg));
}

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}


//===----------------------------------------------------------------------===//
//                            Direct Assignment
//===----------------------------------------------------------------------===//

/// tryAssign - Try to assign VirtReg to an available register.
unsigned RAGreedy::tryAssign(LiveInterval &VirtReg,
                             AllocationOrder &Order,
                             SmallVectorImpl<LiveInterval*> &NewVRegs) {
  Order.rewind();
  unsigned PhysReg;
  while ((PhysReg = Order.next()))
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      break;
  if (!PhysReg || Order.isHint(PhysReg))
    return PhysReg;

  // PhysReg is available. Try to evict interference from a cheaper alternative.
  unsigned Cost = TRI->getCostPerUse(PhysReg);

  // Most registers have 0 additional cost.
  if (!Cost)
    return PhysReg;

  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is available at cost " << Cost
               << '\n');
  unsigned CheapReg = tryEvict(VirtReg, Order, NewVRegs, Cost);
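  // Fall back to the original PhysReg if no cheaper register could be freed.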
  return CheapReg ? CheapReg : PhysReg;
}


//===----------------------------------------------------------------------===//
//                         Interference eviction
//===----------------------------------------------------------------------===//

/// canEvict - determine if A can evict the assigned live range B. The eviction
/// policy defined by this function together with the allocation order defined
/// by enqueue() decides which registers ultimately end up being split and
/// spilled.
///
/// This function must define a non-circular relation when it returns CE_Always,
/// otherwise infinite eviction loops are possible. When evicting a <= RS_Second
/// range, it is possible to return CE_WithSplit which forces the evicted
/// register to be split or spilled before it can evict anything again. That
/// guarantees progress.
RAGreedy::CanEvict RAGreedy::canEvict(LiveInterval &A, LiveInterval &B) {
  return A.weight > B.weight ? CE_Always : CE_Never;
}

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted.
/// Return false if any interference is heavier than MaxWeight.
/// On return, set MaxWeight to the maximal spill weight of an interference.
bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
                                    float &MaxWeight) {
  float Weight = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is heavier.
    if (Q.collectInterferingVRegs(10, MaxWeight) >= 10)
      return false;

    // Check if any interfering live range is heavier than MaxWeight.
    for (unsigned i = Q.interferingVRegs().size(); i; --i) {
      LiveInterval *Intf = Q.interferingVRegs()[i - 1];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= MaxWeight)
        return false;
      switch (canEvict(VirtReg, *Intf)) {
      case CE_Always:
        break;
      case CE_Never:
        return false;
      case CE_WithSplit:
        if (getStage(*Intf) > RS_Second)
          return false;
        break;
      }
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param  VirtReg Currently unassigned virtual register.
/// @param  Order   Physregs to try.
/// @return         Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs,
                            unsigned CostPerUseLimit) {
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = HUGE_VALF;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    if (TRI->getCostPerUse(PhysReg) >= CostPerUseLimit)
      continue;
    // The first use of a register in a function has cost 1.
    if (CostPerUseLimit == 1 && !MRI->isPhysRegUsed(PhysReg))
      continue;

    float Weight = BestWeight;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
      // Prevent looping by forcing the evicted ranges to be split before they
      // can evict anything else.
      if (getStage(*Intf) < RS_Second &&
          canEvict(VirtReg, *Intf) == CE_WithSplit)
        LRStage[Intf->reg] = RS_Second;
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// addSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Physreg and its aliases. Add the constraints to
/// SpillPlacement and return the static cost of this split in Cost, assuming
/// that all preferences in SplitConstraints are met.
/// Return false if there are no bundles with positive bias.
bool RAGreedy::addSplitConstraints(InterferenceCache::Cursor Intf,
                                   float &Cost) {
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();

  // Reset interference dependent info.
  SplitConstraints.resize(UseBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];

    BC.Number = BI.MBB->getNumber();
    Intf.moveToBlock(BC.Number);
    BC.Entry = BI.LiveIn ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = BI.LiveOut ? SpillPlacement::PrefReg : SpillPlacement::DontCare;

    if (!Intf.hasInterference())
      continue;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
    if (BI.LiveIn) {
      if (Intf.first() <= Indexes->getMBBStartIdx(BC.Number))
        BC.Entry = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.first() < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.first() < BI.LastUse)
        ++Ins;
    }

    // Interference for the live-out value.
    if (BI.LiveOut) {
      if (Intf.last() >= SA->getLastSplitPoint(BC.Number))
        BC.Exit = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.last() > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.last() > BI.FirstUse)
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  Cost = StaticCost;

  // Add constraints for use-blocks. Note that these are the only constraints
  // that may add a positive bias, it is downhill from here.
  SpillPlacer->addConstraints(SplitConstraints);
  return SpillPlacer->scanActiveBundles();
}


/// addThroughConstraints - Add constraints and links to SpillPlacer from the
/// live-through blocks in Blocks.
void RAGreedy::addThroughConstraints(InterferenceCache::Cursor Intf,
                                     ArrayRef<unsigned> Blocks) {
  const unsigned GroupSize = 8;
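  // Constraints for blocks with interference are batched in BCS; transparent
  // blocks that only need bundle links are batched in TBS.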
  SpillPlacement::BlockConstraint BCS[GroupSize];
  unsigned TBS[GroupSize];
  unsigned B = 0, T = 0;

  for (unsigned i = 0; i != Blocks.size(); ++i) {
    unsigned Number = Blocks[i];
    Intf.moveToBlock(Number);

    if (!Intf.hasInterference()) {
      assert(T < GroupSize && "Array overflow");
      TBS[T] = Number;
      if (++T == GroupSize) {
        SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
        T = 0;
      }
      continue;
    }

    assert(B < GroupSize && "Array overflow");
    BCS[B].Number = Number;

    // Interference for the live-in value.
    if (Intf.first() <= Indexes->getMBBStartIdx(Number))
      BCS[B].Entry = SpillPlacement::MustSpill;
    else
      BCS[B].Entry = SpillPlacement::PrefSpill;

    // Interference for the live-out value.
    if (Intf.last() >= SA->getLastSplitPoint(Number))
      BCS[B].Exit = SpillPlacement::MustSpill;
    else
      BCS[B].Exit = SpillPlacement::PrefSpill;

    if (++B == GroupSize) {
      ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
      SpillPlacer->addConstraints(Array);
      B = 0;
    }
  }

  ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
  SpillPlacer->addConstraints(Array);
  SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
}

void RAGreedy::growRegion(GlobalSplitCandidate &Cand,
                          InterferenceCache::Cursor Intf) {
  // Keep track of through blocks that have not been added to SpillPlacer.
  BitVector Todo = SA->getThroughBlocks();
  SmallVectorImpl<unsigned> &ActiveBlocks = Cand.ActiveBlocks;
  unsigned AddedTo = 0;
#ifndef NDEBUG
  unsigned Visited = 0;
#endif

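  // Keep growing the region as long as the spill placer reports new bundles
  // with a positive bias; each round can expose more through blocks.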
  for (;;) {
    ArrayRef<unsigned> NewBundles = SpillPlacer->getRecentPositive();
    if (NewBundles.empty())
      break;
    // Find new through blocks in the periphery of PrefRegBundles.
    for (int i = 0, e = NewBundles.size(); i != e; ++i) {
      unsigned Bundle = NewBundles[i];
      // Look at all blocks connected to Bundle in the full graph.
      ArrayRef<unsigned> Blocks = Bundles->getBlocks(Bundle);
      for (ArrayRef<unsigned>::iterator I = Blocks.begin(), E = Blocks.end();
           I != E; ++I) {
        unsigned Block = *I;
        if (!Todo.test(Block))
          continue;
        Todo.reset(Block);
        // This is a new through block. Add it to SpillPlacer later.
        ActiveBlocks.push_back(Block);
#ifndef NDEBUG
        ++Visited;
#endif
      }
    }
    // Any new blocks to add?
    if (ActiveBlocks.size() > AddedTo) {
      ArrayRef<unsigned> Add(&ActiveBlocks[AddedTo],
                             ActiveBlocks.size() - AddedTo);
      addThroughConstraints(Intf, Add);
      AddedTo = ActiveBlocks.size();
    }
    // Perhaps iterating can enable more bundles?
    SpillPlacer->iterate();
  }
  DEBUG(dbgs() << ", v=" << Visited);
}

/// calcSpillCost - Compute how expensive it would be to split the live range in
/// SA around all use blocks instead of forming bundle regions.
float RAGreedy::calcSpillCost() {
  float Cost = 0;
  const LiveInterval &LI = SA->getParent();
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    unsigned Number = BI.MBB->getNumber();
    // We normally only need one spill instruction - a load or a store.
    Cost += SpillPlacer->getBlockFrequency(Number);

    // Unless the value is redefined in the block.
    if (BI.LiveIn && BI.LiveOut) {
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(Number);
      LiveInterval::const_iterator I = LI.find(Start);
      assert(I != LI.end() && "Expected live-in value");
      // Is there a different live-out value? If so, we need an extra spill
      // instruction.
      if (I->end < Stop)
        Cost += SpillPlacer->getBlockFrequency(Number);
    }
  }
  return Cost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(GlobalSplitCandidate &Cand,
                                    InterferenceCache::Cursor Intf) {
  float GlobalCost = 0;
  const BitVector &LiveBundles = Cand.LiveBundles;
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

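    // Spill code is needed on entry or exit whenever the bundle solution
    // disagrees with the block's preferred placement in SplitConstraints.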
    if (BI.LiveIn)
      Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
    if (BI.LiveOut)
      Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }

  for (unsigned i = 0, e = Cand.ActiveBlocks.size(); i != e; ++i) {
    unsigned Number = Cand.ActiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    if (!RegIn && !RegOut)
      continue;
    if (RegIn && RegOut) {
      // We need double spill code if this block has interference.
      Intf.moveToBlock(Number);
      if (Intf.hasInterference())
        GlobalCost += 2*SpillPlacer->getBlockFrequency(Number);
      continue;
    }
    // live-in / stack-out or stack-in live-out.
    GlobalCost += SpillPlacer->getBlockFrequency(Number);
  }
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg,
                                 GlobalSplitCandidate &Cand,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  const BitVector &LiveBundles = Cand.LiveBundles;

  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(Cand.PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  InterferenceCache::Cursor Intf(IntfCache, Cand.PhysReg);
  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  const unsigned MainIntv = SE->openIntv();

  // First add all defs that are live out of a block.
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Create separate intervals for isolated blocks with multiple uses.
    if (!RegIn && !RegOut && BI.FirstUse != BI.LastUse) {
      DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " isolated.\n");
      SE->splitSingleBlock(BI);
      SE->selectIntv(MainIntv);
      continue;
    }

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ") intf [" << Intf.first() << ';' << Intf.last() << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!Intf.hasInterference() || Intf.first() < Stop)
           && "Bad interference");
    assert((!Intf.hasInterference() || Intf.last() > Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << Intf.last());

    if (!BI.LiveThrough && Intf.last() <= BI.FirstUse) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.FirstUse << '\n');
      SE->useIntv(BI.FirstUse, Stop);
      continue;
    }

    SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
    if (Intf.last().getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.last().getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= Intf.last() && "Couldn't avoid interference");
        assert(SegStart < LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= Intf.last() && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber() << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ')');

    // Check interference entering the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(Start, SE->leaveIntvAfter(BI.LastUse));
        continue;
      }
      if (!RegOut) {
        SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(LastSplitPoint);
        SE->useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << Intf.first());

    if (!BI.LiveThrough && Intf.first() >= BI.LastUse) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.LastUse << '\n');
      SE->useIntv(Start, BI.LastUse);
      continue;
    }

    if (Intf.first().getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.first().getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
      SE->useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
  }

  // Handle live-through blocks.
  for (unsigned i = 0, e = Cand.ActiveBlocks.size(); i != e; ++i) {
    unsigned Number = Cand.ActiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    DEBUG(dbgs() << "Live through BB#" << Number << '\n');
    if (RegIn && RegOut) {
      Intf.moveToBlock(Number);
      if (!Intf.hasInterference()) {
        SE->useIntv(Indexes->getMBBStartIdx(Number),
                    Indexes->getMBBEndIdx(Number));
        continue;
      }
    }
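    // Otherwise the value spends part of this block on the stack: leave the
    // register interval at the top and/or re-enter it at the end as needed.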
    MachineBasicBlock *MBB = MF->getBlockNumbered(Number);
    if (RegIn)
      SE->leaveIntvAtTop(*MBB);
    if (RegOut)
      SE->enterIntvAtEnd(*MBB);
  }

  ++NumGlobalSplits;

  SmallVector<unsigned, 8> IntvMap;
  SE->finish(&IntvMap);
  DebugVars->splitRegister(VirtReg.reg, LREdit.regs());

  LRStage.resize(MRI->getNumVirtRegs());
  unsigned OrigBlocks = SA->getNumLiveBlocks();
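  // Remember how many blocks the original range was live in so we can tell
  // whether splitting the main interval actually made progress.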
Jakob Stoklund Olesen59280462011-04-21 18:38:15 +0000980
981 // Sort out the new intervals created by splitting. We get four kinds:
982 // - Remainder intervals should not be split again.
983 // - Candidate intervals can be assigned to Cand.PhysReg.
984 // - Block-local splits are candidates for local splitting.
985 // - DCE leftovers should go back on the queue.
986 for (unsigned i = 0, e = LREdit.size(); i != e; ++i) {
987 unsigned Reg = LREdit.get(i)->reg;
988
989 // Ignore old intervals from DCE.
990 if (LRStage[Reg] != RS_New)
991 continue;
992
993    // Remainder interval. Don't try splitting again; spill if it doesn't
994 // allocate.
995 if (IntvMap[i] == 0) {
996 LRStage[Reg] = RS_Global;
997 continue;
998 }
999
Jakob Stoklund Olesen9f4b8932011-04-26 22:33:12 +00001000 // Main interval. Allow repeated splitting as long as the number of live
1001 // blocks is strictly decreasing.
1002 if (IntvMap[i] == MainIntv) {
1003 if (SA->countLiveBlocks(LREdit.get(i)) >= OrigBlocks) {
1004 DEBUG(dbgs() << "Main interval covers the same " << OrigBlocks
1005 << " blocks as original.\n");
1006        // Don't allow repeated splitting as a safeguard against looping.
1007 LRStage[Reg] = RS_Global;
1008 }
1009 continue;
1010 }
1011
1012 // Other intervals are treated as new. This includes local intervals created
1013 // for blocks with multiple uses, and anything created by DCE.
Jakob Stoklund Olesen59280462011-04-21 18:38:15 +00001014 }
1015
Jakob Stoklund Oleseneb291572011-03-27 22:49:21 +00001016 if (VerifyEnabled)
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001017 MF->verify(this, "After splitting live range around region");
1018}
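// A minimal sketch (hypothetical, not compiled) of the staging policy that the
// classification loop at the end of splitAroundRegion applies to the intervals
// still staged RS_New. The stage names mirror the LiveRangeStage values used in
// this file; the helper itself and its parameters are made up for illustration.
#if 0
static LiveRangeStage stageAfterRegionSplit(unsigned IntvIdx, unsigned MainIntv,
                                            unsigned NewLiveBlocks,
                                            unsigned OrigLiveBlocks) {
  if (IntvIdx == 0)                        // Remainder: never split again.
    return RS_Global;
  if (IntvIdx == MainIntv)                 // Main interval: allow another region
    return NewLiveBlocks < OrigLiveBlocks  // split only if it got strictly
               ? RS_New                    // smaller.
               : RS_Global;
  return RS_New;                           // Block-local pieces remain
}                                          // candidates for local splitting.
#endif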
1019
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001020unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1021 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001022 float BestCost = Hysteresis * calcSpillCost();
1023 DEBUG(dbgs() << "Cost of isolating all blocks = " << BestCost << '\n');
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001024 const unsigned NoCand = ~0u;
1025 unsigned BestCand = NoCand;
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +00001026
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001027 Order.rewind();
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +00001028 for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
1029 if (GlobalCand.size() <= Cand)
1030 GlobalCand.resize(Cand+1);
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001031 GlobalCand[Cand].reset(PhysReg);
Jakob Stoklund Olesen96dcd952011-03-05 01:10:31 +00001032
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001033 SpillPlacer->prepare(GlobalCand[Cand].LiveBundles);
Jakob Stoklund Olesen1b400e82011-04-06 21:32:38 +00001034 float Cost;
Jakob Stoklund Olesenf4afdfc2011-04-09 02:59:09 +00001035 InterferenceCache::Cursor Intf(IntfCache, PhysReg);
1036 if (!addSplitConstraints(Intf, Cost)) {
1037 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tno positive bundles\n");
Jakob Stoklund Olesen1b400e82011-04-06 21:32:38 +00001038 continue;
1039 }
Jakob Stoklund Olesenf4afdfc2011-04-09 02:59:09 +00001040 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tstatic = " << Cost);
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001041 if (Cost >= BestCost) {
1042 DEBUG({
1043 if (BestCand == NoCand)
1044 dbgs() << " worse than no bundles\n";
1045 else
1046 dbgs() << " worse than "
1047 << PrintReg(GlobalCand[BestCand].PhysReg, TRI) << '\n';
1048 });
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001049 continue;
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001050 }
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001051 growRegion(GlobalCand[Cand], Intf);
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001052
Jakob Stoklund Olesen9efa2a22011-04-06 19:13:57 +00001053 SpillPlacer->finish();
1054
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001055 // No live bundles, defer to splitSingleBlocks().
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001056 if (!GlobalCand[Cand].LiveBundles.any()) {
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001057 DEBUG(dbgs() << " no bundles.\n");
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001058 continue;
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001059 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001060
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001061 Cost += calcGlobalSplitCost(GlobalCand[Cand], Intf);
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001062 DEBUG({
1063 dbgs() << ", total = " << Cost << " with bundles";
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001064 for (int i = GlobalCand[Cand].LiveBundles.find_first(); i>=0;
1065 i = GlobalCand[Cand].LiveBundles.find_next(i))
Jakob Stoklund Olesen874be742011-03-05 03:28:51 +00001066 dbgs() << " EB#" << i;
1067 dbgs() << ".\n";
1068 });
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001069 if (Cost < BestCost) {
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001070 BestCand = Cand;
Jakob Stoklund Olesen20072982011-04-22 22:47:40 +00001071 BestCost = Hysteresis * Cost; // Prevent rounding effects.
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001072 }
1073 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001074
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001075 if (BestCand == NoCand)
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001076 return 0;
1077
Jakob Stoklund Olesen5db42892011-04-12 21:30:53 +00001078 splitAroundRegion(VirtReg, GlobalCand[BestCand], NewVRegs);
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001079 return 0;
1080}
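// Worked example of the Hysteresis comparison in tryRegionSplit above, with
// hypothetical numbers: if the plain spill cost is 10.0 and Hysteresis is, say,
// 0.98, BestCost starts at 9.8. A candidate whose total cost is 9.9 is rejected
// even though it beats 10.0, while one costing 9.5 is accepted and lowers
// BestCost to 9.31, so later candidates must improve by more than rounding
// noise.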
1081
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001082
1083//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001084// Local Splitting
1085//===----------------------------------------------------------------------===//
1086
1087
1088/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
1089/// in order to use PhysReg between two entries in SA->UseSlots.
1090///
1091/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
1092///
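/// A hypothetical example: with UseSlots = {16r, 32r, 48r} there are two gaps.
/// An interfering segment of weight 2.0 covering [28r;52r) overlaps the
/// instruction at 32r, so it raises both GapWeight[0] and GapWeight[1] to 2.0;
/// a segment ending before 32r would only affect GapWeight[0].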
1093void RAGreedy::calcGapWeights(unsigned PhysReg,
1094 SmallVectorImpl<float> &GapWeight) {
Jakob Stoklund Olesendb529a82011-04-06 03:57:00 +00001095 assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
1096 const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001097 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1098 const unsigned NumGaps = Uses.size()-1;
1099
1100 // Start and end points for the interference check.
1101 SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
1102 SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;
1103
1104 GapWeight.assign(NumGaps, 0.0f);
1105
1106 // Add interference from each overlapping register.
1107 for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
1108 if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
1109 .checkInterference())
1110 continue;
1111
1112 // We know that VirtReg is a continuous interval from FirstUse to LastUse,
1113 // so we don't need InterferenceQuery.
1114 //
1115 // Interference that overlaps an instruction is counted in both gaps
1116 // surrounding the instruction. The exception is interference before
1117 // StartIdx and after StopIdx.
1118 //
1119 LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
1120 for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
1121 // Skip the gaps before IntI.
1122 while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
1123 if (++Gap == NumGaps)
1124 break;
1125 if (Gap == NumGaps)
1126 break;
1127
1128 // Update the gaps covered by IntI.
1129 const float weight = IntI.value()->weight;
1130 for (; Gap != NumGaps; ++Gap) {
1131 GapWeight[Gap] = std::max(GapWeight[Gap], weight);
1132 if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
1133 break;
1134 }
1135 if (Gap == NumGaps)
1136 break;
1137 }
1138 }
1139}
1140
1141/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
1142/// before MI that has a slot index. If MI is the first mapped instruction in
1143/// its block, return the block start index instead.
1144///
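/// Hypothetical example: in a block containing "%a = ADD ...; DBG_VALUE ...;
/// %b = COPY ...; MI", getPrevMappedIndex(MI) skips the copy and the debug
/// value and returns the slot index of the ADD.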
1145SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
1146 assert(MI && "Missing MachineInstr");
1147 const MachineBasicBlock *MBB = MI->getParent();
1148 MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
1149 while (I != B)
1150 if (!(--I)->isDebugValue() && !I->isCopy())
1151 return Indexes->getInstructionIndex(I);
1152 return Indexes->getMBBStartIdx(MBB);
1153}
1154
1155/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
1156/// real non-copy instruction for each instruction in SA->UseSlots.
1157///
1158void RAGreedy::calcPrevSlots() {
1159 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1160 PrevSlot.clear();
1161 PrevSlot.reserve(Uses.size());
1162 for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
1163 const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
1164 PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
1165 }
1166}
1167
1168/// nextSplitPoint - Find the next index j > i into SA->UseSlots such that
1169/// it may be beneficial to split before UseSlots[j].
1170///
1171/// 0 is always a valid split point
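///
/// Hypothetical example: with Uses = {A, A+1, B}, where A+1 is adjacent to A
/// (PrevSlot[1] <= A) but B is not adjacent to A+1, nextSplitPoint(0) skips
/// index 1 and returns 2, so the local splitter never tries to split between
/// two back-to-back instructions.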
1172unsigned RAGreedy::nextSplitPoint(unsigned i) {
1173 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1174 const unsigned Size = Uses.size();
1175 assert(i != Size && "No split points after the end");
1176 // Allow split before i when Uses[i] is not adjacent to the previous use.
1177 while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
1178 ;
1179 return i;
1180}
1181
1182/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
1183/// basic block.
1184///
1185unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1186 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesendb529a82011-04-06 03:57:00 +00001187 assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
1188 const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001189
1190 // Note that it is possible to have an interval that is live-in or live-out
1191  // while only covering a single block: a phi-def can use undef values from
1192 // predecessors, and the block could be a single-block loop.
1193 // We don't bother doing anything clever about such a case, we simply assume
1194 // that the interval is continuous from FirstUse to LastUse. We should make
1195 // sure that we don't do anything illegal to such an interval, though.
1196
1197 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1198 if (Uses.size() <= 2)
1199 return 0;
1200 const unsigned NumGaps = Uses.size()-1;
1201
1202 DEBUG({
1203 dbgs() << "tryLocalSplit: ";
1204 for (unsigned i = 0, e = Uses.size(); i != e; ++i)
1205 dbgs() << ' ' << SA->UseSlots[i];
1206 dbgs() << '\n';
1207 });
1208
1209 // For every use, find the previous mapped non-copy instruction.
1210 // We use this to detect valid split points, and to estimate new interval
1211 // sizes.
1212 calcPrevSlots();
1213
1214 unsigned BestBefore = NumGaps;
1215 unsigned BestAfter = 0;
1216 float BestDiff = 0;
1217
Jakob Stoklund Olesen40a42a22011-03-04 00:58:40 +00001218 const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001219 SmallVector<float, 8> GapWeight;
1220
1221 Order.rewind();
1222 while (unsigned PhysReg = Order.next()) {
1223 // Keep track of the largest spill weight that would need to be evicted in
1224 // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
1225 calcGapWeights(PhysReg, GapWeight);
1226
1227 // Try to find the best sequence of gaps to close.
1228 // The new spill weight must be larger than any gap interference.
1229
1230 // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
1231 unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;
1232
1233 // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
1234 // It is the spill weight that needs to be evicted.
1235 float MaxGap = GapWeight[0];
1236 for (unsigned i = 1; i != SplitAfter; ++i)
1237 MaxGap = std::max(MaxGap, GapWeight[i]);
1238
1239 for (;;) {
1240 // Live before/after split?
1241 const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
1242 const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;
1243
1244 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
1245 << Uses[SplitBefore] << '-' << Uses[SplitAfter]
1246 << " i=" << MaxGap);
1247
1248 // Stop before the interval gets so big we wouldn't be making progress.
1249 if (!LiveBefore && !LiveAfter) {
1250 DEBUG(dbgs() << " all\n");
1251 break;
1252 }
1253 // Should the interval be extended or shrunk?
1254 bool Shrink = true;
1255 if (MaxGap < HUGE_VALF) {
1256 // Estimate the new spill weight.
1257 //
1258 // Each instruction reads and writes the register, except the first
1259        // instr doesn't read when !LiveBefore, and the last instr doesn't write
1260        // when !LiveAfter.
1261 //
1262 // We will be inserting copies before and after, so the total number of
1263 // reads and writes is 2 * EstUses.
1264 //
1265 const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
1266 2*(LiveBefore + LiveAfter);
1267
1268 // Try to guess the size of the new interval. This should be trivial,
1269 // but the slot index of an inserted copy can be a lot smaller than the
1270 // instruction it is inserted before if there are many dead indexes
1271 // between them.
1272 //
1273 // We measure the distance from the instruction before SplitBefore to
1274 // get a conservative estimate.
1275 //
1276 // The final distance can still be different if inserting copies
1277 // triggers a slot index renumbering.
1278 //
1279 const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
1280 PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
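        // Worked example with hypothetical numbers: splitting around
        // Uses[SplitBefore..SplitAfter] = Uses[2..5] of a range that is live
        // both before and after the split gives EstUses = 2*(5-2) + 2*(1+1)
        // = 10. With blockFreq = 3.0 and a distance of 40 slots the estimate
        // is normalizeSpillWeight(30.0, 40), and the split is only kept below
        // when that estimate beats MaxGap by the Hysteresis margin.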
1281 // Would this split be possible to allocate?
1282        // Never allocate all gaps; we wouldn't be making progress.
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001283 DEBUG(dbgs() << " w=" << EstWeight);
1284 if (EstWeight * Hysteresis >= MaxGap) {
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001285 Shrink = false;
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001286 float Diff = EstWeight - MaxGap;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001287 if (Diff > BestDiff) {
1288 DEBUG(dbgs() << " (best)");
Jakob Stoklund Olesen66446c82011-04-30 05:07:46 +00001289 BestDiff = Hysteresis * Diff;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001290 BestBefore = SplitBefore;
1291 BestAfter = SplitAfter;
1292 }
1293 }
1294 }
1295
1296 // Try to shrink.
1297 if (Shrink) {
1298 SplitBefore = nextSplitPoint(SplitBefore);
1299 if (SplitBefore < SplitAfter) {
1300 DEBUG(dbgs() << " shrink\n");
1301 // Recompute the max when necessary.
1302 if (GapWeight[SplitBefore - 1] >= MaxGap) {
1303 MaxGap = GapWeight[SplitBefore];
1304 for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
1305 MaxGap = std::max(MaxGap, GapWeight[i]);
1306 }
1307 continue;
1308 }
1309 MaxGap = 0;
1310 }
1311
1312 // Try to extend the interval.
1313 if (SplitAfter >= NumGaps) {
1314 DEBUG(dbgs() << " end\n");
1315 break;
1316 }
1317
1318 DEBUG(dbgs() << " extend\n");
1319 for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
1320 SplitAfter != e; ++SplitAfter)
1321 MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
1322 continue;
1323 }
1324 }
1325
1326 // Didn't find any candidates?
1327 if (BestBefore == NumGaps)
1328 return 0;
1329
1330 DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
1331 << '-' << Uses[BestAfter] << ", " << BestDiff
1332 << ", " << (BestAfter - BestBefore + 1) << " instrs\n");
1333
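  // For example (hypothetical indices): BestBefore = 2 and BestAfter = 4 mean
  // the new interval is entered just before Uses[2] and left just after
  // Uses[4]; the remaining uses stay in the remainder interval produced by the
  // split below.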
Jakob Stoklund Olesen92a55f42011-03-09 00:57:29 +00001334 LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001335 SE->reset(LREdit);
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001336
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001337 SE->openIntv();
1338 SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
1339 SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
1340 SE->useIntv(SegStart, SegStop);
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001341 SE->finish();
Jakob Stoklund Olesenf42b6612011-05-06 18:00:02 +00001342 DebugVars->splitRegister(VirtReg.reg, LREdit.regs());
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001343 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +00001344 ++NumLocalSplits;
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001345
1346 return 0;
1347}
1348
1349//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001350// Live Range Splitting
1351//===----------------------------------------------------------------------===//
1352
1353/// trySplit - Try to split VirtReg or one of its interferences, making it
1354/// assignable.
1355/// @return PhysReg when VirtReg may be assigned, and/or new live ranges in NewVRegs.
1356unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
1357 SmallVectorImpl<LiveInterval*>&NewVRegs) {
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001358 // Local intervals are handled separately.
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001359 if (LIS->intervalIsInOneMBB(VirtReg)) {
1360 NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001361 SA->analyze(&VirtReg);
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001362 return tryLocalSplit(VirtReg, Order, NewVRegs);
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001363 }
1364
1365 NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001366
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001367 // Don't iterate global splitting.
1368 // Move straight to spilling if this range was produced by a global split.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001369 if (getStage(VirtReg) >= RS_Global)
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001370 return 0;
1371
1372 SA->analyze(&VirtReg);
1373
Jakob Stoklund Olesen7d6b6a02011-05-03 20:42:13 +00001374 // FIXME: SplitAnalysis may repair broken live ranges coming from the
1375 // coalescer. That may cause the range to become allocatable which means that
1376 // tryRegionSplit won't be making progress. This check should be replaced with
1377 // an assertion when the coalescer is fixed.
1378 if (SA->didRepairRange()) {
1379 // VirtReg has changed, so all cached queries are invalid.
Jakob Stoklund Olesenbdda37d2011-05-10 17:37:41 +00001380 invalidateVirtRegs();
Jakob Stoklund Olesen7d6b6a02011-05-03 20:42:13 +00001381 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1382 return PhysReg;
1383 }
1384
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001385 // First try to split around a region spanning multiple blocks.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001386 unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
1387 if (PhysReg || !NewVRegs.empty())
1388 return PhysReg;
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001389
1390 // Then isolate blocks with multiple uses.
Jakob Stoklund Olesenfd5c5132011-04-12 19:32:53 +00001391 SplitAnalysis::BlockPtrSet Blocks;
1392 if (SA->getMultiUseBlocks(Blocks)) {
1393 LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
1394 SE->reset(LREdit);
1395 SE->splitSingleBlocks(Blocks);
1396 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Global);
1397 if (VerifyEnabled)
1398 MF->verify(this, "After splitting live range around basic blocks");
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001399 }
1400
1401 // Don't assign any physregs.
1402 return 0;
1403}
1404
1405
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001406//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001407// Main Entry Point
1408//===----------------------------------------------------------------------===//
1409
1410unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001411 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001412 // First try assigning a free register.
Jakob Stoklund Olesendd479e92010-12-10 22:21:05 +00001413 AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
Jakob Stoklund Olesen6bfba2e2011-04-20 18:19:48 +00001414 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1415 return PhysReg;
Andrew Trickb853e6c2010-12-09 18:15:21 +00001416
Jakob Stoklund Olesenb8d936b2011-05-25 23:58:36 +00001417 LiveRangeStage Stage = getStage(VirtReg);
1418 DEBUG(dbgs() << StageName[Stage] << '\n');
1419
Jakob Stoklund Olesen76395c92011-06-01 18:45:02 +00001420 // Try to evict a less worthy live range, but only for ranges from the primary
1421 // queue. The RS_Second ranges already failed to do this, and they should not
1422 // get a second chance until they have been split.
1423 if (Stage != RS_Second)
1424 if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
1425 return PhysReg;
Andrew Trickb853e6c2010-12-09 18:15:21 +00001426
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001427 assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");
1428
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001429 // The first time we see a live range, don't try to split or spill.
1430 // Wait until the second time, when all smaller ranges have been allocated.
1431 // This gives a better picture of the interference to split around.
Jakob Stoklund Olesenf22ca3f2011-03-30 02:52:39 +00001432 if (Stage == RS_First) {
Jakob Stoklund Oleseneb291572011-03-27 22:49:21 +00001433 LRStage[VirtReg.reg] = RS_Second;
Jakob Stoklund Olesenc1655e12011-03-19 23:02:47 +00001434 DEBUG(dbgs() << "wait for second round\n");
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001435 NewVRegs.push_back(&VirtReg);
1436 return 0;
1437 }
1438
Jakob Stoklund Olesenbf4e10f2011-05-06 21:58:30 +00001439 // If we couldn't allocate a register from spilling, there is probably some
1440  // invalid inline assembly. The base class will report it.
1441 if (Stage >= RS_Spill)
1442 return ~0u;
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001443
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +00001444 // Try splitting VirtReg or interferences.
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001445 unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
1446 if (PhysReg || !NewVRegs.empty())
Jakob Stoklund Olesenb64d92e2010-12-14 00:37:44 +00001447 return PhysReg;
1448
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001449 // Finally spill VirtReg itself.
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001450 NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen47dbf6c2011-03-10 01:51:42 +00001451 LiveRangeEdit LRE(VirtReg, NewVRegs, this);
1452 spiller().spill(LRE);
Jakob Stoklund Olesen6094bd82011-03-29 21:20:19 +00001453 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Spill);
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001454
Jakob Stoklund Olesenc46570d2011-03-16 22:56:08 +00001455 if (VerifyEnabled)
1456 MF->verify(this, "After spilling");
1457
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001458 // The live virtual register requesting allocation was spilled, so tell
1459 // the caller not to allocate anything during this round.
1460 return 0;
1461}
1462
1463bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
1464 DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
1465 << "********** Function: "
1466 << ((Value*)mf.getFunction())->getName() << '\n');
1467
1468 MF = &mf;
Jakob Stoklund Olesenaf249642010-12-17 23:16:35 +00001469 if (VerifyEnabled)
Jakob Stoklund Olesen89cab932010-12-18 00:06:56 +00001470 MF->verify(this, "Before greedy register allocator");
Jakob Stoklund Olesenaf249642010-12-17 23:16:35 +00001471
Jakob Stoklund Olesen4680dec2010-12-10 23:49:00 +00001472 RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001473 Indexes = &getAnalysis<SlotIndexes>();
Jakob Stoklund Olesenf428eb62010-12-17 23:16:32 +00001474 DomTree = &getAnalysis<MachineDominatorTree>();
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001475 ReservedRegs = TRI->getReservedRegs(*MF);
Jakob Stoklund Olesenf6dff842010-12-10 22:54:44 +00001476 SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +00001477 Loops = &getAnalysis<MachineLoopInfo>();
1478 LoopRanges = &getAnalysis<MachineLoopRanges>();
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001479 Bundles = &getAnalysis<EdgeBundles>();
1480 SpillPlacer = &getAnalysis<SpillPlacement>();
Jakob Stoklund Olesenf42b6612011-05-06 18:00:02 +00001481 DebugVars = &getAnalysis<LiveDebugVariables>();
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001482
Jakob Stoklund Olesen1b847de2011-02-19 00:53:42 +00001483 SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
Jakob Stoklund Olesenbece06f2011-03-03 01:29:13 +00001484 SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001485 LRStage.clear();
1486 LRStage.resize(MRI->getNumVirtRegs());
Jakob Stoklund Oleseneda0fe82011-04-02 06:03:38 +00001487 IntfCache.init(MF, &PhysReg2LiveUnion[0], Indexes, TRI);
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +00001488
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001489 allocatePhysRegs();
1490 addMBBLiveIns(MF);
Jakob Stoklund Olesen8a61da82011-02-08 21:13:03 +00001491 LIS->addKillFlags();
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001492
1493 // Run rewriter
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001494 {
1495 NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesenba05c012011-02-18 22:03:18 +00001496 VRM->rewrite(Indexes);
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001497 }
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001498
Jakob Stoklund Olesencfafc542011-04-05 21:40:37 +00001499 // Write out new DBG_VALUE instructions.
Jakob Stoklund Olesenf42b6612011-05-06 18:00:02 +00001500 DebugVars->emitDebugValues(VRM);
Jakob Stoklund Olesencfafc542011-04-05 21:40:37 +00001501
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001502 // The pass output is in VirtRegMap. Release all the transient data.
1503 releaseMemory();
1504
1505 return true;
1506}