//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumReassigned,   "Number of interferences reassigned");
STATISTIC(NumEvicted,      "Number of interferences evicted");

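// Register the greedy allocator with the common register allocator registry so
// it can be selected on the command line with -regalloc=greedy.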
static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
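  // Priority queue of (priority, virtual register) pairs waiting to be
  // assigned; the pair ordering pops the highest priority register first.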
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_Original, ///< Never seen before, never split.
    RS_Second,   ///< Second time in the queue.
    RS_Region,   ///< Produced by region splitting.
    RS_Block,    ///< Produced by per-block splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };

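  // The current LiveRangeStage of each virtual register, stored as an
  // unsigned char and indexed by virtual register number.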
  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin)
      LRStage[(*Begin)->reg] = NewStage;
  }

  // Splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

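  /// IndexPair - Interference bounds within a live block: first is the first
  /// interference point for live-in blocks, second is the last interference
  /// point for live-out blocks (see mapGlobalInterference).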
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    SmallVector<IndexPair, 8> Interference;
    BitVector LiveBundles;
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);

  void mapGlobalInterference(unsigned, SmallVectorImpl<IndexPair>&);
  float calcSplitConstraints(const SmallVectorImpl<IndexPair>&);

  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&,
                       SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_Original) {
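  // Initialize all the analysis passes this allocator depends on.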
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  RegAllocBase::releaseMemory();
}

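/// enqueue - Add an unassigned virtual register to the priority queue.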
void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (priority, reg) pairs.
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_Original)
    // 1st generation ranges are handled first, long -> short.
    Prio = (1u << 31) + Size;
  else
    // Repeat offenders are handled second, short -> long.
    Prio = (1u << 30) - Size;

  // Boost ranges that have a physical register hint.
  const unsigned Hint = VRM->getRegAllocPref(Reg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint))
    Prio |= (1u << 30);

  Queue.push(std::make_pair(Prio, Reg));
}

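/// dequeue - Return the next unassigned register in priority order, or 0 when
/// the queue is empty.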
LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}

//===----------------------------------------------------------------------===//
//                            Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      if (Q.collectInterferingVRegs(2) > 1)
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    unassign(InterferingVReg, OldAssign);
    assign(InterferingVReg, PhysReg);
    ++NumReassigned;
    return true;
  }
  return false;
}

/// tryReassign - Try to reassign a single interference to a different physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order,
                               SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
    if (!InterferingVReg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
      continue;
    if (reassignVReg(*InterferingVReg, PhysReg))
      return PhysReg;
  }
  return 0;
}


//===----------------------------------------------------------------------===//
//                          Interference eviction
//===----------------------------------------------------------------------===//

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted. Set MaxWeight to the maximal spill weight of an
/// interference.
376bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
Jakob Stoklund Olesend17924b2011-03-04 21:32:50 +0000377 float &MaxWeight) {
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000378 float Weight = 0;
379 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
380 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is heavier.
    if (Q.collectInterferingVRegs(10) >= 10)
      return false;

    // Check if any interfering live range is heavier than VirtReg.
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= VirtReg.weight)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = 0;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
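  // Unassign every interfering virtual register from BestPhys and its aliases,
  // and hand them back to the allocator through NewVRegs so they get requeued.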
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// mapGlobalInterference - Compute a map of the interference from PhysReg and
/// its aliases in each block in SA->LiveBlocks.
/// If LiveBlocks[i] is live-in, Ranges[i].first is the first interference.
/// If LiveBlocks[i] is live-out, Ranges[i].second is the last interference.
void RAGreedy::mapGlobalInterference(unsigned PhysReg,
                                     SmallVectorImpl<IndexPair> &Ranges) {
  Ranges.assign(SA->LiveBlocks.size(), IndexPair());
  LiveInterval &VirtReg = const_cast<LiveInterval&>(SA->getParent());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = Ranges[i];

      // Skip interference-free blocks.
      if (IntI.start() >= BI.Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(BI.Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= BI.Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(BI.Stop);
        if (!IntI.valid() || IntI.start() >= BI.Stop)
          --IntI;
        if (IntI.stop() <= BI.Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }
}

/// calcSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Intf. Return the static cost of this split,
/// assuming that all preferences in SplitConstraints are met.
float RAGreedy::calcSplitConstraints(const SmallVectorImpl<IndexPair> &Intf) {
  // Reset interference dependent info.
  SplitConstraints.resize(SA->LiveBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    IndexPair IP = Intf[i];

    BC.Number = BI.MBB->getNumber();
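    // Blocks with uses initially prefer a register at both ends; the
    // interference checks below may demote that to a spill preference.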
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
    if (IP.first.isValid()) {
      if (IP.first <= BI.Start)
        BC.Entry = SpillPlacement::MustSpill, Ins += BI.Uses;
      else if (!BI.Uses)
        BC.Entry = SpillPlacement::PrefSpill;
      else if (IP.first < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (IP.first < (BI.LiveThrough ? BI.LastUse : BI.Kill))
        ++Ins;
    }

    // Interference for the live-out value.
    if (IP.second.isValid()) {
      if (IP.second >= BI.LastSplitPoint)
        BC.Exit = SpillPlacement::MustSpill, Ins += BI.Uses;
      else if (!BI.Uses)
        BC.Exit = SpillPlacement::PrefSpill;
      else if (IP.second > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (IP.second > (BI.LiveThrough ? BI.FirstUse : BI.Def))
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  return StaticCost;
}


/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
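    // Did the spill placer assign the entry and exit edge bundles of this
    // block to a register?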
    bool RegIn  = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

    if (!BI.Uses)
      Ins += RegIn != RegOut;
    else {
      if (BI.LiveIn)
        Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
      if (BI.LiveOut)
        Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    }
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  SmallVector<IndexPair, 8> InterferenceRanges;
  mapGlobalInterference(PhysReg, InterferenceRanges);

  LiveRangeEdit LREdit(VirtReg, NewVRegs);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  SE->openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < BI.Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > BI.Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE->enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.Def), BI.Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), BI.Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE->useIntv(BI.Def, BI.Stop);
      continue;
    }


    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, BI.Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE->useIntv(BI.Start, BI.Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE->leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(BI.Start, SE->leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(BI.Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(BI.LastSplitPoint);
        SE->useIntv(BI.Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(BI.Start, BI.Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE->useIntv(BI.Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE->useIntv(BI.Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE->closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE->finish();
  ++NumGlobalSplits;

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;

  Order.rewind();
  for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
    if (GlobalCand.size() <= Cand)
      GlobalCand.resize(Cand+1);
    GlobalCand[Cand].PhysReg = PhysReg;

    mapGlobalInterference(PhysReg, GlobalCand[Cand].Interference);
    float Cost = calcSplitConstraints(GlobalCand[Cand].Interference);
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tstatic = " << Cost);
    if (BestReg && Cost >= BestCost) {
      DEBUG(dbgs() << " higher.\n");
      continue;
    }

    SpillPlacer->placeSpills(SplitConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any()) {
      DEBUG(dbgs() << " no bundles.\n");
      continue;
    }

    Cost += calcGlobalSplitCost(LiveBundles);
    DEBUG({
      dbgs() << ", total = " << Cost << " with bundles";
      for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
        dbgs() << " EB#" << i;
      dbgs() << ".\n";
    });
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = 0.98f * Cost; // Prevent rounding effects.
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Region);
  return 0;
}


//===----------------------------------------------------------------------===//
//                             Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}
938
939/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
940/// before MI that has a slot index. If MI is the first mapped instruction in
941/// its block, return the block start index instead.
942///
943SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
944 assert(MI && "Missing MachineInstr");
945 const MachineBasicBlock *MBB = MI->getParent();
946 MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
947 while (I != B)
948 if (!(--I)->isDebugValue() && !I->isCopy())
949 return Indexes->getInstructionIndex(I);
950 return Indexes->getMBBStartIdx(MBB);
951}
952
953/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
954/// real non-copy instruction for each instruction in SA->UseSlots.
955///
956void RAGreedy::calcPrevSlots() {
957 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
958 PrevSlot.clear();
959 PrevSlot.reserve(Uses.size());
960 for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
961 const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
962 PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
963 }
964}
965
966/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
967/// be beneficial to split before UseSlots[i].
968///
969/// 0 is always a valid split point
970unsigned RAGreedy::nextSplitPoint(unsigned i) {
971 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
972 const unsigned Size = Uses.size();
973 assert(i != Size && "No split points after the end");
974 // Allow split before i when Uses[i] is not adjacent to the previous use.
975 while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
976 ;
977 return i;
978}
979
980/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
981/// basic block.
982///
983unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
984 SmallVectorImpl<LiveInterval*> &NewVRegs) {
985 assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
986 const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
987
988 // Note that it is possible to have an interval that is live-in or live-out
989 // while only covering a single block - A phi-def can use undef values from
990 // predecessors, and the block could be a single-block loop.
991 // We don't bother doing anything clever about such a case, we simply assume
992 // that the interval is continuous from FirstUse to LastUse. We should make
993 // sure that we don't do anything illegal to such an interval, though.
994
995 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
996 if (Uses.size() <= 2)
997 return 0;
998 const unsigned NumGaps = Uses.size()-1;
999
1000 DEBUG({
1001 dbgs() << "tryLocalSplit: ";
1002 for (unsigned i = 0, e = Uses.size(); i != e; ++i)
1003 dbgs() << ' ' << SA->UseSlots[i];
1004 dbgs() << '\n';
1005 });
1006
1007 // For every use, find the previous mapped non-copy instruction.
1008 // We use this to detect valid split points, and to estimate new interval
1009 // sizes.
1010 calcPrevSlots();
1011
1012 unsigned BestBefore = NumGaps;
1013 unsigned BestAfter = 0;
1014 float BestDiff = 0;
1015
Jakob Stoklund Olesen40a42a22011-03-04 00:58:40 +00001016 const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001017 SmallVector<float, 8> GapWeight;
1018
1019 Order.rewind();
1020 while (unsigned PhysReg = Order.next()) {
1021 // Keep track of the largest spill weight that would need to be evicted in
1022 // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
1023 calcGapWeights(PhysReg, GapWeight);
1024
1025 // Try to find the best sequence of gaps to close.
1026 // The new spill weight must be larger than any gap interference.
1027
1028 // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
1029 unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;
1030
1031 // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
1032 // It is the spill weight that needs to be evicted.
1033 float MaxGap = GapWeight[0];
1034 for (unsigned i = 1; i != SplitAfter; ++i)
1035 MaxGap = std::max(MaxGap, GapWeight[i]);
1036
1037 for (;;) {
1038 // Live before/after split?
1039 const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
1040 const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;
1041
1042 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
1043 << Uses[SplitBefore] << '-' << Uses[SplitAfter]
1044 << " i=" << MaxGap);
1045
1046 // Stop before the interval gets so big we wouldn't be making progress.
1047 if (!LiveBefore && !LiveAfter) {
1048 DEBUG(dbgs() << " all\n");
1049 break;
1050 }
1051 // Should the interval be extended or shrunk?
1052 bool Shrink = true;
1053 if (MaxGap < HUGE_VALF) {
1054 // Estimate the new spill weight.
1055 //
1056 // Each instruction reads and writes the register, except the first
1057 // instr doesn't read when !FirstLive, and the last instr doesn't write
1058 // when !LastLive.
1059 //
1060 // We will be inserting copies before and after, so the total number of
1061 // reads and writes is 2 * EstUses.
1062 //
1063 const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
1064 2*(LiveBefore + LiveAfter);
1065
1066 // Try to guess the size of the new interval. This should be trivial,
1067 // but the slot index of an inserted copy can be a lot smaller than the
1068 // instruction it is inserted before if there are many dead indexes
1069 // between them.
1070 //
1071 // We measure the distance from the instruction before SplitBefore to
1072 // get a conservative estimate.
1073 //
1074 // The final distance can still be different if inserting copies
1075 // triggers a slot index renumbering.
1076 //
1077 const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
1078 PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
1079 // Would this split be possible to allocate?
1080 // Never allocate all gaps, we wouldn't be making progress.
1081 float Diff = EstWeight - MaxGap;
1082 DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
1083 if (Diff > 0) {
1084 Shrink = false;
1085 if (Diff > BestDiff) {
1086 DEBUG(dbgs() << " (best)");
1087 BestDiff = Diff;
1088 BestBefore = SplitBefore;
1089 BestAfter = SplitAfter;
1090 }
1091 }
1092 }
1093
      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

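      // Extend to the next split point, folding the newly covered gaps into
      // MaxGap.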
      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

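  // Carve the chosen window out of VirtReg: the new interval covers
  // Uses[BestBefore] through Uses[BestAfter], with copies inserted at the
  // window boundaries by the split editor.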
  LiveRangeEdit LREdit(VirtReg, NewVRegs);
  SE->reset(LREdit);

  SE->openIntv();
  SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
  SE->useIntv(SegStart, SegStop);
  SE->closeIntv();
  SE->finish();
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
  ++NumLocalSplits;

  return 0;
}

//===----------------------------------------------------------------------===//
// Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return PhysReg if VirtReg can be assigned; new live ranges may also be
/// appended to NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    SA->analyze(&VirtReg);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // Don't iterate global splitting.
  // Move straight to spilling if this range was produced by a global split.
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage >= RS_Block)
    return 0;

  SA->analyze(&VirtReg);

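  // Global splitting proceeds in stages: first try a region split spanning
  // several blocks, then fall back to isolating individual blocks with
  // multiple uses. The stage recorded on the resulting registers keeps later
  // rounds from repeating a stage on ranges it already produced.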
  // First try to split around a region spanning multiple blocks.
  if (Stage < RS_Region) {
    unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
    if (PhysReg || !NewVRegs.empty())
      return PhysReg;
  }

  // Then isolate blocks with multiple uses.
  if (Stage < RS_Block) {
    SplitAnalysis::BlockPtrSet Blocks;
    if (SA->getMultiUseBlocks(Blocks)) {
      LiveRangeEdit LREdit(VirtReg, NewVRegs);
      SE->reset(LREdit);
      SE->splitSingleBlocks(Blocks);
      setStage(NewVRegs.begin(), NewVRegs.end(), RS_Block);
      if (VerifyEnabled)
        MF->verify(this, "After splitting live range around basic blocks");
    }
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
// Main Entry Point
//===----------------------------------------------------------------------===//

unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
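  // Allocation strategy, in order: try a completely free register, try to
  // reassign an interfering range, try to evict interference, and only then
  // (after the first round) split or spill. Any ranges created along the way
  // are returned through NewVRegs for the caller to requeue.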
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage == RS_Original)
    LRStage[VirtReg.reg] = RS_Second;

  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryReassign(VirtReg, Order, NewVRegs))
    return PhysReg;

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // The first time we see a live range, don't try to split or spill.
  // Wait until the second time, when all smaller ranges have been allocated.
  // This gives a better picture of the interference to split around.
  if (Stage == RS_Original) {
    NewVRegs.push_back(&VirtReg);
    return 0;
  }

  assert(Stage < RS_Spill && "Cannot allocate after spilling");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

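  // The split analysis and split editor are created once per function; the
  // editor is reset for each live range that gets split. LRStage remembers how
  // far each virtual register has progressed through the splitting stages.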
  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
  SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
  LRStage.clear();
  LRStage.resize(MRI->getNumVirtRegs());

  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run the rewriter to apply the register assignments recorded in the
  // VirtRegMap.
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}