//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "InterferenceCache.h"
#include "LiveDebugVariables.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/SparseBitVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass,
                 public RegAllocBase,
                 private LiveRangeEdit::Delegate {

  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_New,      ///< Never seen before.
    RS_First,    ///< First time in the queue.
    RS_Second,   ///< Second time in the queue.
    RS_Region,   ///< Produced by region splitting.
    RS_Block,    ///< Produced by per-block splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };

  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

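  // Note that setStage only updates registers that are still in RS_New; a
  // live range that already has a stage keeps it and never moves backwards.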
  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin) {
      unsigned Reg = (*Begin)->reg;
      if (LRStage[Reg] == RS_New)
        LRStage[Reg] = NewStage;
    }
  }

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// Cached per-block interference maps
  InterferenceCache IntfCache;

  /// All basic blocks where the current register has uses.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

  /// Live-through blocks that have already been added to SpillPlacer.
  SparseBitVector<> ActiveThroughBlocks;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    BitVector LiveBundles;
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  void LRE_WillEraseInstruction(MachineInstr*);
  bool LRE_CanEraseVirtReg(unsigned);
  void LRE_WillShrinkVirtReg(unsigned);
  void LRE_DidCloneVirtReg(unsigned, unsigned);

  bool addSplitConstraints(InterferenceCache::Cursor, float&);
  void addThroughConstraints(InterferenceCache::Cursor, ArrayRef<unsigned>);
  void growRegion(InterferenceCache::Cursor);
  float calcGlobalSplitCost(unsigned, const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_New) {
  initializeLiveDebugVariablesPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addPreserved<LiveDebugVariables>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}


//===----------------------------------------------------------------------===//
// LiveRangeEdit delegate methods
//===----------------------------------------------------------------------===//

void RAGreedy::LRE_WillEraseInstruction(MachineInstr *MI) {
  // LRE itself will remove from SlotIndexes and parent basic block.
  VRM->RemoveMachineInstrFromMaps(MI);
}

bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {
  if (unsigned PhysReg = VRM->getPhys(VirtReg)) {
    unassign(LIS->getInterval(VirtReg), PhysReg);
    return true;
  }
  // Unassigned virtreg is probably in the priority queue.
  // RegAllocBase will erase it after dequeueing.
  return false;
}

void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {
  unsigned PhysReg = VRM->getPhys(VirtReg);
  if (!PhysReg)
    return;

  // Register is assigned, put it back on the queue for reassignment.
  LiveInterval &LI = LIS->getInterval(VirtReg);
  unassign(LI, PhysReg);
  enqueue(&LI);
}

void RAGreedy::LRE_DidCloneVirtReg(unsigned New, unsigned Old) {
  // LRE may clone a virtual register because dead code elimination causes it to
  // be split into connected components. Ensure that the new register gets the
  // same stage as the parent.
  LRStage.grow(New);
  LRStage[New] = LRStage[Old];
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (priority, reg) pairs.
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_New)
    LRStage[Reg] = RS_First;

  if (LRStage[Reg] == RS_Second)
    // Unsplit ranges that couldn't be allocated immediately are deferred until
    // everything else has been allocated. Long ranges are allocated last so
    // they are split against realistic interference.
    Prio = (1u << 31) - Size;
  else {
    // Everything else is allocated in long->short order. Long ranges that don't
    // fit should be spilled ASAP so they don't create interference.
    Prio = (1u << 31) + Size;

    // Boost ranges that have a physical register hint.
    if (TargetRegisterInfo::isPhysicalRegister(VRM->getRegAllocPref(Reg)))
      Prio |= (1u << 30);
  }
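
  // Illustrative priority layout: a first-time range of size 100 with a hint
  // gets Prio = ((1u << 31) + 100) | (1u << 30) = 0xC0000064, an unhinted
  // range of the same size gets 0x80000064, and any deferred RS_Second range
  // stays below 0x80000000, so it is dequeued after all of the above.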
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000322
323 Queue.push(std::make_pair(Prio, Reg));
Jakob Stoklund Olesen90c1d7d2010-12-08 22:57:16 +0000324}
325
Jakob Stoklund Olesen98d96482011-02-22 23:01:52 +0000326LiveInterval *RAGreedy::dequeue() {
327 if (Queue.empty())
328 return 0;
329 LiveInterval *LI = &LIS->getInterval(Queue.top().second);
330 Queue.pop();
331 return LI;
332}
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +0000333
334//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000335// Interference eviction
336//===----------------------------------------------------------------------===//
337
338/// canEvict - Return true if all interferences between VirtReg and PhysReg can
Jakob Stoklund Olesen3f5bedf2011-04-11 21:47:01 +0000339/// be evicted.
340/// Return false if any interference is heavier than MaxWeight.
341/// On return, set MaxWeight to the maximal spill weight of an interference.
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000342bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
Jakob Stoklund Olesend17924b2011-03-04 21:32:50 +0000343 float &MaxWeight) {
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000344 float Weight = 0;
345 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
346 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is heavier.
    if (Q.collectInterferingVRegs(10, MaxWeight) >= 10)
      return false;

    // Check if any interfering live range is heavier than MaxWeight.
    for (unsigned i = Q.interferingVRegs().size(); i; --i) {
      LiveInterval *Intf = Q.interferingVRegs()[i - 1];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= MaxWeight)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param  VirtReg Currently unassigned virtual register.
/// @param  Order   Physregs to try.
/// @return         Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
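  // BestWeight starts out as VirtReg's own spill weight, so canEvictInterference
  // only accepts a PhysReg when every interfering live range is strictly lighter
  // than the best (lightest) eviction candidate found so far.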
  float BestWeight = VirtReg.weight;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = BestWeight;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
// Region Splitting
//===----------------------------------------------------------------------===//

/// addSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Physreg and its aliases. Add the constraints to
/// SpillPlacement and return the static cost of this split in Cost, assuming
/// that all preferences in SplitConstraints are met.
/// Return false if there are no bundles with positive bias.
bool RAGreedy::addSplitConstraints(InterferenceCache::Cursor Intf,
                                   float &Cost) {
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();

  // Reset interference dependent info.
  SplitConstraints.resize(UseBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];

    BC.Number = BI.MBB->getNumber();
    Intf.moveToBlock(BC.Number);
    BC.Entry = BI.LiveIn ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = BI.LiveOut ? SpillPlacement::PrefReg : SpillPlacement::DontCare;

    if (!Intf.hasInterference())
      continue;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
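    // MustSpill: the interference is live-in to the block, so the value cannot
    // enter in a register at all. PrefSpill: the interference starts before the
    // first use, so entering in a register would force extra spill code. If it
    // only begins after the first use, the entry preference stands, and it
    // costs at most one extra instruction (none if it starts after the last
    // use or kill).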
    if (BI.LiveIn) {
      if (Intf.first() <= Indexes->getMBBStartIdx(BC.Number))
        BC.Entry = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.first() < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.first() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
        ++Ins;
    }

    // Interference for the live-out value.
    if (BI.LiveOut) {
      if (Intf.last() >= SA->getLastSplitPoint(BC.Number))
        BC.Exit = SpillPlacement::MustSpill, ++Ins;
      else if (Intf.last() > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (Intf.last() > (BI.LiveThrough ? BI.FirstUse : BI.Def))
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  Cost = StaticCost;

  // Add constraints for use-blocks. Note that these are the only constraints
  // that may add a positive bias; it is downhill from here.
  SpillPlacer->addConstraints(SplitConstraints);
  return SpillPlacer->scanActiveBundles();
}


/// addThroughConstraints - Add constraints and links to SpillPlacer from the
/// live-through blocks in Blocks.
void RAGreedy::addThroughConstraints(InterferenceCache::Cursor Intf,
                                     ArrayRef<unsigned> Blocks) {
  const unsigned GroupSize = 8;
  SpillPlacement::BlockConstraint BCS[GroupSize];
  unsigned TBS[GroupSize];
  unsigned B = 0, T = 0;
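  // Constraints (BCS) and links (TBS) are flushed to SpillPlacer in batches of
  // GroupSize so the scratch arrays stay small; whatever is left over is
  // flushed after the loop.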

  for (unsigned i = 0; i != Blocks.size(); ++i) {
    unsigned Number = Blocks[i];
    Intf.moveToBlock(Number);

    if (!Intf.hasInterference()) {
      assert(T < GroupSize && "Array overflow");
      TBS[T] = Number;
      if (++T == GroupSize) {
        SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
        T = 0;
      }
      continue;
    }

    assert(B < GroupSize && "Array overflow");
    BCS[B].Number = Number;

    // Interference for the live-in value.
    if (Intf.first() <= Indexes->getMBBStartIdx(Number))
      BCS[B].Entry = SpillPlacement::MustSpill;
    else
      BCS[B].Entry = SpillPlacement::PrefSpill;

    // Interference for the live-out value.
    if (Intf.last() >= SA->getLastSplitPoint(Number))
      BCS[B].Exit = SpillPlacement::MustSpill;
    else
      BCS[B].Exit = SpillPlacement::PrefSpill;

    if (++B == GroupSize) {
      ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
      SpillPlacer->addConstraints(Array);
      B = 0;
    }
  }

  ArrayRef<SpillPlacement::BlockConstraint> Array(BCS, B);
  SpillPlacer->addConstraints(Array);
  SpillPlacer->addLinks(ArrayRef<unsigned>(TBS, T));
}

void RAGreedy::growRegion(InterferenceCache::Cursor Intf) {
  // Keep track of through blocks that have already been added to SpillPlacer.
  SparseBitVector<> Added;
  SmallVector<unsigned, 16> ThroughBlocks;
#ifndef NDEBUG
  unsigned Visited = 0;
#endif
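  // Run the placement solver to a fixed point: each batch of bundles that
  // turns positive can pull additional live-through blocks into the region,
  // and their constraints may in turn make further bundles positive.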
  for (;;) {
    ArrayRef<unsigned> NewBundles = SpillPlacer->getRecentPositive();
    if (NewBundles.empty())
      break;
    // Find new through blocks in the periphery of PrefRegBundles.
    for (int i = 0, e = NewBundles.size(); i != e; ++i) {
      unsigned Bundle = NewBundles[i];
      // Look at all blocks connected to Bundle in the full graph.
      ArrayRef<unsigned> Blocks = Bundles->getBlocks(Bundle);
      for (ArrayRef<unsigned>::iterator I = Blocks.begin(), E = Blocks.end();
           I != E; ++I) {
        unsigned Block = *I;
        if (!SA->isThroughBlock(Block) || !Added.test_and_set(Block))
          continue;
        // This is a new through block. Add it to SpillPlacer later.
        ThroughBlocks.push_back(Block);
#ifndef NDEBUG
        ++Visited;
#endif
      }
    }
    // Any new blocks to add?
    if (!ThroughBlocks.empty()) {
      addThroughConstraints(Intf, ThroughBlocks);
      ThroughBlocks.clear();
    }
    // Perhaps iterating can enable more bundles?
    SpillPlacer->iterate();
  }

  // Remember the relevant set of through blocks for splitAroundRegion().
  ActiveThroughBlocks |= Added;
  DEBUG(dbgs() << ", v=" << Visited);
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(unsigned PhysReg,
                                    const BitVector &LiveBundles) {
  float GlobalCost = 0;
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

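    // Each block boundary where the bundle solution (RegIn/RegOut) disagrees
    // with the preference recorded in SplitConstraints needs spill code, so
    // charge the block frequency once per mismatch.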
    if (BI.LiveIn)
      Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
    if (BI.LiveOut)
      Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }

  InterferenceCache::Cursor Intf(IntfCache, PhysReg);
  for (SparseBitVector<>::iterator I = ActiveThroughBlocks.begin(),
       E = ActiveThroughBlocks.end(); I != E; ++I) {
    unsigned Number = *I;
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    if (!RegIn && !RegOut)
      continue;
    if (RegIn && RegOut) {
      // We need double spill code if this block has interference.
      Intf.moveToBlock(Number);
      if (Intf.hasInterference())
        GlobalCost += 2*SpillPlacer->getBlockFrequency(Number);
      continue;
    }
    // live-in / stack-out or stack-in live-out.
    GlobalCost += SpillPlacer->getBlockFrequency(Number);
  }
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
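/// The code below makes three passes: first over the use blocks whose exit
/// bundle wants the register, growing the main interval up to each block end;
/// then over the use blocks whose entry bundle wants the register, growing it
/// down from each block start; and finally over the live-through blocks
/// recorded in ActiveThroughBlocks.
///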
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  InterferenceCache::Cursor Intf(IntfCache, PhysReg);
  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  SE->openIntv();

  // First add all defs that are live out of a block.
  ArrayRef<SplitAnalysis::BlockInfo> UseBlocks = SA->getUseBlocks();
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ") intf [" << Intf.first() << ';' << Intf.last() << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!Intf.hasInterference() || Intf.first() < Stop)
           && "Bad interference");
    assert((!Intf.hasInterference() || Intf.last() > Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << Intf.last());

    if (!BI.LiveThrough && Intf.last() <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE->useIntv(BI.Def, Stop);
      continue;
    }

    SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
    if (Intf.last().getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.last().getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= Intf.last() && "Couldn't avoid interference");
        assert(SegStart < LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= Intf.last() && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0; i != UseBlocks.size(); ++i) {
    const SplitAnalysis::BlockInfo &BI = UseBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
    Intf.moveToBlock(BI.MBB->getNumber());
    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber() << " [" << Start << ';'
                 << SA->getLastSplitPoint(BI.MBB->getNumber()) << '-' << Stop
                 << ')');

    // Check interference entering the block.
    if (!Intf.hasInterference()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(Start, SE->leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        SlotIndex LastSplitPoint = SA->getLastSplitPoint(BI.MBB->getNumber());
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(LastSplitPoint);
        SE->useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << Intf.first());

    if (!BI.LiveThrough && Intf.first() >= BI.Kill) {
      // The interference doesn't reach the incoming segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE->useIntv(Start, BI.Kill);
      continue;
    }

    if (Intf.first().getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         Intf.first().getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
      SE->useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= Intf.first() && "Couldn't avoid interference");
  }

  // Handle live-through blocks.
  for (SparseBitVector<>::iterator I = ActiveThroughBlocks.begin(),
       E = ActiveThroughBlocks.end(); I != E; ++I) {
    unsigned Number = *I;
    bool RegIn  = LiveBundles[Bundles->getBundle(Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
    DEBUG(dbgs() << "Live through BB#" << Number << '\n');
    if (RegIn && RegOut) {
      Intf.moveToBlock(Number);
      if (!Intf.hasInterference()) {
        SE->useIntv(Indexes->getMBBStartIdx(Number),
                    Indexes->getMBBEndIdx(Number));
        continue;
      }
    }
    MachineBasicBlock *MBB = MF->getBlockNumbered(Number);
    if (RegIn)
      SE->leaveIntvAtTop(*MBB);
    if (RegOut)
      SE->enterIntvAtEnd(*MBB);
  }

  SE->closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE->finish();
  ++NumGlobalSplits;

  if (VerifyEnabled)
    MF->verify(this, "After splitting live range around region");
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
  ActiveThroughBlocks.clear();

  Order.rewind();
  for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
    if (GlobalCand.size() <= Cand)
      GlobalCand.resize(Cand+1);
    GlobalCand[Cand].PhysReg = PhysReg;

    SpillPlacer->prepare(LiveBundles);
    float Cost;
    InterferenceCache::Cursor Intf(IntfCache, PhysReg);
    if (!addSplitConstraints(Intf, Cost)) {
      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tno positive bundles\n");
      continue;
    }
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tstatic = " << Cost);
    if (BestReg && Cost >= BestCost) {
      DEBUG(dbgs() << " worse than " << PrintReg(BestReg, TRI) << '\n');
      continue;
    }
    growRegion(Intf);

    SpillPlacer->finish();

    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any()) {
      DEBUG(dbgs() << " no bundles.\n");
      continue;
    }

    Cost += calcGlobalSplitCost(PhysReg, LiveBundles);
    DEBUG({
      dbgs() << ", total = " << Cost << " with bundles";
      for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
        dbgs() << " EB#" << i;
      dbgs() << ".\n";
    });
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = 0.98f * Cost; // Prevent rounding effects.
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Region);
  return 0;
}


//===----------------------------------------------------------------------===//
// Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
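/// For example, with three uses A, B, C there are two gaps: GapWeight[0]
/// covers A-B and GapWeight[1] covers B-C. An interfering segment that
/// overlaps B contributes its weight to both gaps.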
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point
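/// Adjacent uses with no real (non-copy) instruction between them cannot be
/// split profitably, so indices whose PrevSlot is at or before the previous
/// use are skipped.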
991unsigned RAGreedy::nextSplitPoint(unsigned i) {
992 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
993 const unsigned Size = Uses.size();
994 assert(i != Size && "No split points after the end");
995 // Allow split before i when Uses[i] is not adjacent to the previous use.
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}

/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
/// basic block.
///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->getUseBlocks().size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->getUseBlocks().front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - A phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case, we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;
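  // Gap i is the stretch between Uses[i] and Uses[i+1]; a local split picks a
  // consecutive run of gaps for the new register to cover.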

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

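    // Grow [SplitBefore, SplitAfter] to the right while the estimated weight
    // of the new interval beats MaxGap; shrink it from the left otherwise,
    // and remember the candidate with the best margin.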
    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !LiveBefore, and the last instr doesn't
        // write when !LiveAfter.
        //
        // We will be inserting copies before and after, so the total number of
        // reads and writes is 2 * EstUses.
        //
        const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
                                 2*(LiveBefore + LiveAfter);
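        // For example, a candidate covering two gaps that is live both before
        // and after the split gives EstUses = 2*2 + 2*(1+1) = 8.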

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than the
        // instruction it is inserted before if there are many dead indexes
        // between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                                PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps, we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

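  // Carry out the split: create a new interval covering Uses[BestBefore]
  // through Uses[BestAfter], leaving the rest of the range to the complement;
  // the split editor inserts the necessary copies at the boundaries.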
  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  SE->openIntv();
  SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
  SE->useIntv(SegStart, SegStop);
  SE->closeIntv();
  SE->finish();
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
  ++NumLocalSplits;

  return 0;
}

//===----------------------------------------------------------------------===//
//                          Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*>&NewVRegs) {
  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    SA->analyze(&VirtReg);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

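  // Global splitting is staged so that a range produced by one kind of split
  // is not split the same way again: region splits come first, then per-block
  // splits, after which the range falls through to spilling.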
  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // Don't iterate global splitting.
  // Move straight to spilling if this range was produced by a global split.
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage >= RS_Block)
    return 0;

  SA->analyze(&VirtReg);

  // First try to split around a region spanning multiple blocks.
  if (Stage < RS_Region) {
    unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
    if (PhysReg || !NewVRegs.empty())
      return PhysReg;
  }

  // Then isolate blocks with multiple uses.
  if (Stage < RS_Block) {
    SplitAnalysis::BlockPtrSet Blocks;
    if (SA->getMultiUseBlocks(Blocks)) {
      LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
      SE->reset(LREdit);
      SE->splitSingleBlocks(Blocks);
      setStage(NewVRegs.begin(), NewVRegs.end(), RS_Block);
      if (VerifyEnabled)
        MF->verify(this, "After splitting live range around basic blocks");
    }
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // The first time we see a live range, don't try to split or spill.
  // Wait until the second time, when all smaller ranges have been allocated.
  // This gives a better picture of the interference to split around.
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage == RS_First) {
    LRStage[VirtReg.reg] = RS_Second;
    DEBUG(dbgs() << "wait for second round\n");
    NewVRegs.push_back(&VirtReg);
    return 0;
  }

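  // From the second round on, the range must either be assigned a register,
  // be split into smaller ranges that go back on the queue, or be spilled.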
  assert(Stage < RS_Spill && "Cannot allocate after spilling");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  LiveRangeEdit LRE(VirtReg, NewVRegs, this);
  spiller().spill(LRE);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Spill);

  if (VerifyEnabled)
    MF->verify(this, "After spilling");

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
  SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
  LRStage.clear();
  LRStage.resize(MRI->getNumVirtRegs());
  IntfCache.init(MF, &PhysReg2LiveUnion[0], Indexes, TRI);

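  // Run the main allocation loop, then derive block live-in lists and kill
  // flags from the final live intervals.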
  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}