//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "InterferenceCache.h"
#include "LiveDebugVariables.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass,
                 public RegAllocBase,
                 private LiveRangeEdit::Delegate {

  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_New,      ///< Never seen before.
    RS_First,    ///< First time in the queue.
    RS_Second,   ///< Second time in the queue.
    RS_Region,   ///< Produced by region splitting.
    RS_Block,    ///< Produced by per-block splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };
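
  // For example, a virtual register is enqueued for the first time as
  // RS_First. If it cannot be assigned or freed up by eviction, it gets split,
  // and the new ranges are tagged with the stage of the splitter that produced
  // them (RS_Region, RS_Block, or RS_Local), so when they come back out of the
  // queue the earlier, already-failed strategies are not retried.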

  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin) {
      unsigned Reg = (*Begin)->reg;
      if (LRStage[Reg] == RS_New)
        LRStage[Reg] = NewStage;
    }
  }

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// Cached per-block interference maps
  InterferenceCache IntfCache;

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    BitVector LiveBundles;
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  void LRE_WillEraseInstruction(MachineInstr*);
  bool LRE_CanEraseVirtReg(unsigned);
  void LRE_WillShrinkVirtReg(unsigned);
  void LRE_DidCloneVirtReg(unsigned, unsigned);

  bool addSplitConstraints(unsigned, float&);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_New) {
  initializeLiveDebugVariablesPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addPreserved<LiveDebugVariables>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}


//===----------------------------------------------------------------------===//
//                       LiveRangeEdit delegate methods
//===----------------------------------------------------------------------===//

void RAGreedy::LRE_WillEraseInstruction(MachineInstr *MI) {
  // LRE itself will remove from SlotIndexes and parent basic block.
  VRM->RemoveMachineInstrFromMaps(MI);
}

bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {
  if (unsigned PhysReg = VRM->getPhys(VirtReg)) {
    unassign(LIS->getInterval(VirtReg), PhysReg);
    return true;
  }
  // Unassigned virtreg is probably in the priority queue.
  // RegAllocBase will erase it after dequeueing.
  return false;
}

void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {
  unsigned PhysReg = VRM->getPhys(VirtReg);
  if (!PhysReg)
    return;

  // Register is assigned, put it back on the queue for reassignment.
  LiveInterval &LI = LIS->getInterval(VirtReg);
  unassign(LI, PhysReg);
  enqueue(&LI);
}

void RAGreedy::LRE_DidCloneVirtReg(unsigned New, unsigned Old) {
  // LRE may clone a virtual register because dead code elimination causes it to
  // be split into connected components. Ensure that the new register gets the
  // same stage as the parent.
  LRStage.grow(New);
  LRStage[New] = LRStage[Old];
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_New)
    LRStage[Reg] = RS_First;

  if (LRStage[Reg] == RS_Second)
    // Unsplit ranges that couldn't be allocated immediately are deferred until
    // everything else has been allocated. Long ranges are allocated last so
    // they are split against realistic interference.
    Prio = (1u << 31) - Size;
  else {
    // Everything else is allocated in long->short order. Long ranges that don't
    // fit should be spilled ASAP so they don't create interference.
    Prio = (1u << 31) + Size;

    // Boost ranges that have a physical register hint.
    if (TargetRegisterInfo::isPhysicalRegister(VRM->getRegAllocPref(Reg)))
      Prio |= (1u << 30);
  }

  Queue.push(std::make_pair(Prio, Reg));
}
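
// To illustrate the priority scheme above: assuming getSize() stays well below
// (1u << 30), an RS_Second range of size 100 gets Prio = (1u << 31) - 100,
// while a first-time range of the same size gets Prio = (1u << 31) + 100 and is
// therefore dequeued first. The hint bit (1u << 30) then boosts hinted ranges
// ahead of other first-time ranges without ever dropping them below the
// deferred RS_Second ranges.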

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}

//===----------------------------------------------------------------------===//
//                            Interference eviction
//===----------------------------------------------------------------------===//

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted. Set MaxWeight to the maximal spill weight of an
/// interference.
bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
                                    float &MaxWeight) {
  float Weight = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is heavier.
    if (Q.collectInterferingVRegs(10) >= 10)
      return false;

    // Check if any interfering live range is heavier than VirtReg.
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= VirtReg.weight)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param  VirtReg Currently unassigned virtual register.
/// @param  Order   Physregs to try.
/// @return         Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = 0;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// addSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Physreg and its aliases. Add the constraints to
/// SpillPlacement and return the static cost of this split in Cost, assuming
/// that all preferences in SplitConstraints are met.
/// If it is evident that no bundles will be live, abort early and return false.
bool RAGreedy::addSplitConstraints(unsigned PhysReg, float &Cost) {
  InterferenceCache::Cursor Intf(IntfCache, PhysReg);
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();

  // Reset interference dependent info.
  SplitConstraints.resize(UseBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];

    BC.Number = BI.MBB->getNumber();
    Intf.moveToBlock(BC.Number);
    BC.Entry = BI.LiveIn ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = BI.LiveOut ? SpillPlacement::PrefReg : SpillPlacement::DontCare;

    if (!Intf.hasInterference())
      continue;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
    if (BI.LiveIn) {
      if (Intf.first() <= Indexes->getMBBStartIdx(BC.Number))
        BC.Entry = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.first() < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.first() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
        ++Ins;
    }

    // Interference for the live-out value.
    if (BI.LiveOut) {
      if (Intf.last() >= SA->getLastSplitPoint(BC.Number))
        BC.Exit = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.last() > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.last() > (BI.LiveThrough ? BI.FirstUse : BI.Def))
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }

  // Add constraints for use-blocks. Note that these are the only constraints
  // that may add a positive bias, it is downhill from here.
  SpillPlacer->addConstraints(SplitConstraints);
  if (SpillPlacer->getPositiveNodes() == 0)
    return false;

  Cost = StaticCost;

  // Now handle the live-through blocks without uses. These can only add
  // negative bias, so we can abort whenever there are no more positive nodes.
  // Compute constraints for a group of 8 blocks at a time.
  const unsigned GroupSize = 8;
  SpillPlacement::BlockConstraint BCS[GroupSize];
  unsigned B = 0;

  ArrayRef<unsigned> ThroughBlocks = SA->getThroughBlocks();
  for (unsigned i = 0; i != ThroughBlocks.size(); ++i) {
    unsigned Number = ThroughBlocks[i];
    assert(B < GroupSize && "Array overflow");
    BCS[B].Number = Number;
    Intf.moveToBlock(Number);

    if (Intf.hasInterference()) {
      // Interference for the live-in value.
      if (Intf.first() <= Indexes->getMBBStartIdx(Number))
        BCS[B].Entry = SpillPlacement::MustSpill;
      else
        BCS[B].Entry = SpillPlacement::PrefSpill;

      // Interference for the live-out value.
      if (Intf.last() >= SA->getLastSplitPoint(Number))
        BCS[B].Exit = SpillPlacement::MustSpill;
      else
        BCS[B].Exit = SpillPlacement::PrefSpill;
    } else {
      // No interference, transparent block.
      BCS[B].Entry = BCS[B].Exit = SpillPlacement::DontCare;
    }

    if (++B == GroupSize) {
      ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
      SpillPlacer->addConstraints(Array);
      B = 0;
      // Abort early when all hope is lost.
      if (SpillPlacer->getPositiveNodes() == 0)
        return false;
    }
  }

  ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
  SpillPlacer->addConstraints(Array);
  return SpillPlacer->getPositiveNodes() != 0;
}
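
// A note on how the constraints are fed to SpillPlacer above: the use-blocks
// are added first because only they can pull bundles towards a register
// (positive bias). The blocks that are merely live-through are then added in
// groups of GroupSize, and the loop gives up as soon as no positive nodes
// remain.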


/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

    if (BI.LiveIn)
      Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
    if (BI.LiveOut)
      Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }

  ArrayRef<unsigned> ThroughBlocks = SA->getThroughBlocks();
  SplitConstraints.resize(UseBlocks.size() + ThroughBlocks.size());
  for (unsigned i = 0; i != ThroughBlocks.size(); ++i) {
    unsigned Number = ThroughBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    if (RegIn != RegOut)
      GlobalCost += SpillPlacer->getBlockFrequency(Number);
  }
  return GlobalCost;
}
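
// Note on this cost model: a use-block is charged whenever the bundle solution
// (RegIn/RegOut) disagrees with the preference recorded in SplitConstraints,
// and a live-through block is charged only when the value changes location
// across it (RegIn != RegOut). Every charge is weighted by the block frequency
// from SpillPlacer.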

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  InterferenceCache::Cursor Intf(IntfCache, PhysReg);
  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  SE->openIntv();

  // First add all defs that are live out of a block.
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ") intf [" << Intf.first() << ';' << Intf.last() << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!Intf.hasInterference() || Intf.first() < Stop)
           && "Bad interference");
    assert((!Intf.hasInterference() || Intf.last() > Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << Intf.last());

    if (!BI.LiveThrough && Intf.last() <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE->useIntv(BI.Def, Stop);
      continue;
    }

    SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
    if (Intf.last().getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.last().getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= Intf.last() && "Couldn't avoid interference");
        assert(SegStart < LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= Intf.last() && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber() << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ')');

    // Check interference entering the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(Start, SE->leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(LastSplitPoint);
        SE->useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << Intf.first());

    if (!BI.LiveThrough && Intf.first() >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE->useIntv(Start, BI.Kill);
      continue;
    }

    if (Intf.first().getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.first().getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
      SE->useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
  }

  // Handle live-through blocks.
  ArrayRef<unsigned> ThroughBlocks = SA->getThroughBlocks();
  for (unsigned i = 0; i != ThroughBlocks.size(); ++i) {
    unsigned Number = ThroughBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    DEBUG(dbgs() << "Live through BB#" << Number << '\n');
    if (RegIn && RegOut) {
      Intf.moveToBlock(Number);
      if (!Intf.hasInterference()) {
        SE->useIntv(Indexes->getMBBStartIdx(Number),
                    Indexes->getMBBEndIdx(Number));
        continue;
      }
    }
    MachineBasicBlock *MBB = MF->getBlockNumbered(Number);
    if (RegIn)
      SE->leaveIntvAtTop(*MBB);
    if (RegOut)
      SE->enterIntvAtEnd(*MBB);
  }

  SE->closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE->finish();
  ++NumGlobalSplits;

  if (VerifyEnabled)
    MF->verify(this, "After splitting live range around region");
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;

  Order.rewind();
  for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
    if (GlobalCand.size() <= Cand)
      GlobalCand.resize(Cand+1);
    GlobalCand[Cand].PhysReg = PhysReg;

    SpillPlacer->prepare(LiveBundles);
    float Cost;
    if (!addSplitConstraints(PhysReg, Cost)) {
      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tno positive bias\n");
      continue;
    }
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tbiased = "
                 << SpillPlacer->getPositiveNodes() << ", static = " << Cost);
    if (BestReg && Cost >= BestCost) {
      DEBUG(dbgs() << " worse than " << PrintReg(BestReg, TRI) << '\n');
      continue;
    }

    SpillPlacer->finish();

    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any()) {
      DEBUG(dbgs() << " no bundles.\n");
      continue;
    }

    Cost += calcGlobalSplitCost(LiveBundles);
    DEBUG({
      dbgs() << ", total = " << Cost << " with bundles";
      for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
        dbgs() << " EB#" << i;
      dbgs() << ".\n";
    });
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = 0.98f * Cost; // Prevent rounding effects.
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Region);
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}
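
// For example, with uses at slots A < B < C there are two gaps, A-B and B-C.
// A physreg segment overlapping the instruction at B contributes its weight to
// both GapWeight[0] and GapWeight[1], while interference lying entirely before
// StartIdx or after StopIdx is ignored.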

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point
unsigned RAGreedy::nextSplitPoint(unsigned i) {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned Size = Uses.size();
  assert(i != Size && "No split points after the end");
  // Allow split before i when Uses[i] is not adjacent to the previous use.
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}
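
// For example, if Uses[2] immediately follows Uses[1] with no other mapped
// non-copy instruction in between, but there is one between Uses[2] and
// Uses[3], then nextSplitPoint(1) skips index 2 and returns 3: splitting
// between adjacent uses is not considered beneficial.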
935
936/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
937/// basic block.
938///
939unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
940 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesendb529a82011-04-06 03:57:00 +0000941 assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
942 const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +0000943
944 // Note that it is possible to have an interval that is live-in or live-out
945 // while only covering a single block - A phi-def can use undef values from
946 // predecessors, and the block could be a single-block loop.
947 // We don't bother doing anything clever about such a case, we simply assume
948 // that the interval is continuous from FirstUse to LastUse. We should make
949 // sure that we don't do anything illegal to such an interval, though.
950
951 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
952 if (Uses.size() <= 2)
953 return 0;
954 const unsigned NumGaps = Uses.size()-1;
955
956 DEBUG({
957 dbgs() << "tryLocalSplit: ";
958 for (unsigned i = 0, e = Uses.size(); i != e; ++i)
959 dbgs() << ' ' << SA->UseSlots[i];
960 dbgs() << '\n';
961 });
962
963 // For every use, find the previous mapped non-copy instruction.
964 // We use this to detect valid split points, and to estimate new interval
965 // sizes.
966 calcPrevSlots();
967
968 unsigned BestBefore = NumGaps;
969 unsigned BestAfter = 0;
970 float BestDiff = 0;
971
Jakob Stoklund Olesen40a42a22011-03-04 00:58:40 +0000972 const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +0000973 SmallVector<float, 8> GapWeight;
974
975 Order.rewind();
976 while (unsigned PhysReg = Order.next()) {
977 // Keep track of the largest spill weight that would need to be evicted in
978 // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
979 calcGapWeights(PhysReg, GapWeight);
980
981 // Try to find the best sequence of gaps to close.
982 // The new spill weight must be larger than any gap interference.
983
984 // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
985 unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;
986
987 // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
988 // It is the spill weight that needs to be evicted.
989 float MaxGap = GapWeight[0];
990 for (unsigned i = 1; i != SplitAfter; ++i)
991 MaxGap = std::max(MaxGap, GapWeight[i]);
992
993 for (;;) {
994 // Live before/after split?
995 const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
996 const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;
997
998 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
999 << Uses[SplitBefore] << '-' << Uses[SplitAfter]
1000 << " i=" << MaxGap);
1001
1002 // Stop before the interval gets so big we wouldn't be making progress.
1003 if (!LiveBefore && !LiveAfter) {
1004 DEBUG(dbgs() << " all\n");
1005 break;
1006 }
1007 // Should the interval be extended or shrunk?
1008 bool Shrink = true;
1009 if (MaxGap < HUGE_VALF) {
1010 // Estimate the new spill weight.
1011 //
1012 // Each instruction reads and writes the register, except the first
1013 // instr doesn't read when !FirstLive, and the last instr doesn't write
1014 // when !LastLive.
1015 //
1016 // We will be inserting copies before and after, so the total number of
1017 // reads and writes is 2 * EstUses.
1018 //
1019 const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
1020 2*(LiveBefore + LiveAfter);
1021
1022 // Try to guess the size of the new interval. This should be trivial,
1023 // but the slot index of an inserted copy can be a lot smaller than the
1024 // instruction it is inserted before if there are many dead indexes
1025 // between them.
1026 //
1027 // We measure the distance from the instruction before SplitBefore to
1028 // get a conservative estimate.
1029 //
1030 // The final distance can still be different if inserting copies
1031 // triggers a slot index renumbering.
1032 //
1033 const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
1034 PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps; we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
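          // (Only needed when the gap just dropped from the window was
          // carrying the maximum; a stale, too-large MaxGap merely makes the
          // smaller window look more expensive than it really is.)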
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
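      // Grow the window: advance SplitAfter to the next candidate split point
      // (as computed by nextSplitPoint) and fold the newly covered gaps into
      // MaxGap.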
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);
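  // The SplitEditor sequence below carves the chosen window out into a new
  // virtual register: roughly, a copy into the new register is inserted
  // before Uses[BestBefore], a copy back out after Uses[BestAfter], and the
  // covered range is rewritten when finish() runs.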

  SE->openIntv();
  SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
  SE->useIntv(SegStart, SegStop);
  SE->closeIntv();
  SE->finish();
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
  ++NumLocalSplits;

  return 0;
}

//===----------------------------------------------------------------------===//
//                          Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return a PhysReg when VirtReg may be assigned, and/or new live ranges in
/// NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    SA->analyze(&VirtReg);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // Don't iterate global splitting.
  // Move straight to spilling if this range was produced by a global split.
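  // (The per-register stage, managed by getStage/setStage, records which
  // splitting strategies have already been applied to a range's ancestors;
  // the stage comparisons below use that ordering to avoid re-running the
  // region and per-block splitters on their own products.)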
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage >= RS_Block)
    return 0;

  SA->analyze(&VirtReg);

  // First try to split around a region spanning multiple blocks.
  if (Stage < RS_Region) {
    unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
    if (PhysReg || !NewVRegs.empty())
      return PhysReg;
  }

  // Then isolate blocks with multiple uses.
  if (Stage < RS_Block) {
    SplitAnalysis::BlockPtrSet Blocks;
    if (SA->getMultiUseBlocks(Blocks)) {
      LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
      SE->reset(LREdit);
      SE->splitSingleBlocks(Blocks);
      setStage(NewVRegs.begin(), NewVRegs.end(), RS_Block);
      if (VerifyEnabled)
        MF->verify(this, "After splitting live range around basic blocks");
    }
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
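  // Allocation cascade: try a free register first, then eviction, then (from
  // the second time a range is seen) splitting, and finally spilling.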
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // The first time we see a live range, don't try to split or spill.
  // Wait until the second time, when all smaller ranges have been allocated.
  // This gives a better picture of the interference to split around.
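  // (Deferral works by pushing VirtReg back onto NewVRegs and returning 0;
  // the caller re-enqueues everything in NewVRegs instead of assigning a
  // register now.)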
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage == RS_First) {
    LRStage[VirtReg.reg] = RS_Second;
    DEBUG(dbgs() << "wait for second round\n");
    NewVRegs.push_back(&VirtReg);
    return 0;
  }

  assert(Stage < RS_Spill && "Cannot allocate after spilling");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  LiveRangeEdit LRE(VirtReg, NewVRegs, this);
  spiller().spill(LRE);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Spill);

  if (VerifyEnabled)
    MF->verify(this, "After spilling");

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
  SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
  LRStage.clear();
  LRStage.resize(MRI->getNumVirtRegs());
  IntfCache.init(MF, &PhysReg2LiveUnion[0], Indexes, TRI);

  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run the rewriter.
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}