blob: 88931cc40cbe1e1c43933f853e4d388e0c31ac5c [file] [log] [blame]
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines the RAGreedy function pass for register allocation in
11// optimized builds.
12//
13//===----------------------------------------------------------------------===//
14
15#define DEBUG_TYPE "regalloc"
Jakob Stoklund Olesendd479e92010-12-10 22:21:05 +000016#include "AllocationOrder.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000017#include "LiveIntervalUnion.h"
Jakob Stoklund Olesenf428eb62010-12-17 23:16:32 +000018#include "LiveRangeEdit.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000019#include "RegAllocBase.h"
20#include "Spiller.h"
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +000021#include "SpillPlacement.h"
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +000022#include "SplitKit.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000023#include "VirtRegMap.h"
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +000024#include "llvm/ADT/Statistic.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000025#include "llvm/Analysis/AliasAnalysis.h"
26#include "llvm/Function.h"
27#include "llvm/PassAnalysisSupport.h"
28#include "llvm/CodeGen/CalcSpillWeights.h"
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +000029#include "llvm/CodeGen/EdgeBundles.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000030#include "llvm/CodeGen/LiveIntervalAnalysis.h"
31#include "llvm/CodeGen/LiveStackAnalysis.h"
Jakob Stoklund Olesenf428eb62010-12-17 23:16:32 +000032#include "llvm/CodeGen/MachineDominators.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000033#include "llvm/CodeGen/MachineFunctionPass.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000034#include "llvm/CodeGen/MachineLoopInfo.h"
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +000035#include "llvm/CodeGen/MachineLoopRanges.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000036#include "llvm/CodeGen/MachineRegisterInfo.h"
37#include "llvm/CodeGen/Passes.h"
38#include "llvm/CodeGen/RegAllocRegistry.h"
39#include "llvm/CodeGen/RegisterCoalescer.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000040#include "llvm/Target/TargetOptions.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000041#include "llvm/Support/Debug.h"
42#include "llvm/Support/ErrorHandling.h"
43#include "llvm/Support/raw_ostream.h"
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +000044#include "llvm/Support/Timer.h"
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000045
Jakob Stoklund Olesen98d96482011-02-22 23:01:52 +000046#include <queue>
47
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000048using namespace llvm;
49
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +000050STATISTIC(NumGlobalSplits, "Number of split global live ranges");
51STATISTIC(NumLocalSplits, "Number of split local live ranges");
52STATISTIC(NumReassigned, "Number of interferences reassigned");
53STATISTIC(NumEvicted, "Number of interferences evicted");
54
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +000055static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
56 createGreedyRegisterAllocator);
57
namespace {
/// RAGreedy - The greedy register allocator. Live ranges are dequeued by
/// priority and assigned one at a time; ranges that cannot be assigned
/// directly are reassigned, evicted, split, or spilled via selectOrSplit.
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  // Priority queue of (priority, vreg) pairs; see enqueue() for the encoding.
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_Original, ///< Never seen before, never split.
    RS_Second,   ///< Second time in the queue.
    RS_Region,   ///< Produced by region splitting.
    RS_Block,    ///< Produced by per-block splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };

  // Per-vreg stage, indexed by virtual register number.
  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  /// getStage - Return the allocation stage VirtReg is currently in.
  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  /// setStage - Mark every live range in [Begin;End) as being in NewStage.
  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin)
      LRStage[(*Begin)->reg] = NewStage;
  }

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, unsigned, float&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&,
                       SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace
173
char RAGreedy::ID = 0;

/// createGreedyRegisterAllocator - Factory used by the "greedy"
/// RegisterRegAlloc registry entry above.
FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}
179
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +0000180RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_Original) {
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000181 initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +0000182 initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
183 initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
184 initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
185 initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
186 initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
187 initializeLiveStacksPass(*PassRegistry::getPassRegistry());
188 initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
189 initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
Jakob Stoklund Olesend0bb5e22010-12-15 23:46:13 +0000190 initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +0000191 initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000192 initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
193 initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +0000194}
195
/// getAnalysisUsage - Declare all analyses required and preserved by the
/// greedy allocator. The CFG is never modified.
void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
221
/// releaseMemory - Drop per-function state between runs of the pass.
void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  RegAllocBase::releaseMemory();
}
227
Jakob Stoklund Olesen98d96482011-02-22 23:01:52 +0000228void RAGreedy::enqueue(LiveInterval *LI) {
229 // Prioritize live ranges by size, assigning larger ranges first.
230 // The queue holds (size, reg) pairs.
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000231 const unsigned Size = LI->getSize();
232 const unsigned Reg = LI->reg;
Jakob Stoklund Olesen98d96482011-02-22 23:01:52 +0000233 assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
234 "Can only enqueue virtual registers");
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000235 unsigned Prio;
Jakob Stoklund Olesen90c1d7d2010-12-08 22:57:16 +0000236
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +0000237 LRStage.grow(Reg);
238 if (LRStage[Reg] == RS_Original)
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000239 // 1st generation ranges are handled first, long -> short.
240 Prio = (1u << 31) + Size;
241 else
242 // Repeat offenders are handled second, short -> long
243 Prio = (1u << 30) - Size;
Jakob Stoklund Olesend2a50732011-02-23 00:56:56 +0000244
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000245 // Boost ranges that have a physical register hint.
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +0000246 const unsigned Hint = VRM->getRegAllocPref(Reg);
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +0000247 if (TargetRegisterInfo::isPhysicalRegister(Hint))
248 Prio |= (1u << 30);
249
250 Queue.push(std::make_pair(Prio, Reg));
Jakob Stoklund Olesen90c1d7d2010-12-08 22:57:16 +0000251}
252
Jakob Stoklund Olesen98d96482011-02-22 23:01:52 +0000253LiveInterval *RAGreedy::dequeue() {
254 if (Queue.empty())
255 return 0;
256 LiveInterval *LI = &LIS->getInterval(Queue.top().second);
257 Queue.pop();
258 return LI;
259}
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +0000260
261//===----------------------------------------------------------------------===//
262// Register Reassignment
263//===----------------------------------------------------------------------===//
264
Jakob Stoklund Olesen6ce219e2010-12-10 20:45:04 +0000265// Check interference without using the cache.
266bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
267 unsigned PhysReg) {
Jakob Stoklund Olesen257c5562010-12-14 23:38:19 +0000268 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
269 LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
Jakob Stoklund Olesen6ce219e2010-12-10 20:45:04 +0000270 if (subQ.checkInterference())
271 return true;
272 }
273 return false;
274}
275
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000276/// getSingleInterference - Return the single interfering virtual register
277/// assigned to PhysReg. Return 0 if more than one virtual register is
278/// interfering.
279LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
280 unsigned PhysReg) {
Jakob Stoklund Olesen257c5562010-12-14 23:38:19 +0000281 // Check physreg and aliases.
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000282 LiveInterval *Interference = 0;
Jakob Stoklund Olesen257c5562010-12-14 23:38:19 +0000283 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000284 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
285 if (Q.checkInterference()) {
Jakob Stoklund Olesend84de8c2010-12-14 17:47:36 +0000286 if (Interference)
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000287 return 0;
Jakob Stoklund Olesen417df012011-02-23 00:29:55 +0000288 if (Q.collectInterferingVRegs(2) > 1)
Jakob Stoklund Olesend84de8c2010-12-14 17:47:36 +0000289 return 0;
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000290 Interference = Q.interferingVRegs().front();
291 }
292 }
293 return Interference;
294}
295
Andrew Trickb853e6c2010-12-09 18:15:21 +0000296// Attempt to reassign this virtual register to a different physical register.
297//
298// FIXME: we are not yet caching these "second-level" interferences discovered
299// in the sub-queries. These interferences can change with each call to
300// selectOrSplit. However, we could implement a "may-interfere" cache that
301// could be conservatively dirtied when we reassign or split.
302//
303// FIXME: This may result in a lot of alias queries. We could summarize alias
304// live intervals in their parent register's live union, but it's messy.
305bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000306 unsigned WantedPhysReg) {
307 assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
308 "Can only reassign virtual registers");
309 assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
Andrew Trickb853e6c2010-12-09 18:15:21 +0000310 "inconsistent phys reg assigment");
311
Jakob Stoklund Olesendd479e92010-12-10 22:21:05 +0000312 AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
313 while (unsigned PhysReg = Order.next()) {
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000314 // Don't reassign to a WantedPhysReg alias.
315 if (TRI->regsOverlap(PhysReg, WantedPhysReg))
Andrew Trickb853e6c2010-12-09 18:15:21 +0000316 continue;
317
Jakob Stoklund Olesen6ce219e2010-12-10 20:45:04 +0000318 if (checkUncachedInterference(InterferingVReg, PhysReg))
Andrew Trickb853e6c2010-12-09 18:15:21 +0000319 continue;
320
Andrew Trickb853e6c2010-12-09 18:15:21 +0000321 // Reassign the interfering virtual reg to this physical reg.
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000322 unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
323 DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
324 TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000325 unassign(InterferingVReg, OldAssign);
326 assign(InterferingVReg, PhysReg);
Jakob Stoklund Olesen0db841f2011-02-17 22:53:48 +0000327 ++NumReassigned;
Andrew Trickb853e6c2010-12-09 18:15:21 +0000328 return true;
329 }
330 return false;
331}
332
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000333/// tryReassign - Try to reassign a single interference to a different physreg.
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000334/// @param VirtReg Currently unassigned virtual register.
335/// @param Order Physregs to try.
336/// @return Physreg to assign VirtReg, or 0.
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000337unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order,
338 SmallVectorImpl<LiveInterval*> &NewVRegs){
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000339 NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000340
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000341 Order.rewind();
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000342 while (unsigned PhysReg = Order.next()) {
343 LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
344 if (!InterferingVReg)
345 continue;
346 if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
347 continue;
348 if (reassignVReg(*InterferingVReg, PhysReg))
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +0000349 return PhysReg;
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000350 }
351 return 0;
352}
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000353
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000354
355//===----------------------------------------------------------------------===//
356// Interference eviction
357//===----------------------------------------------------------------------===//
358
359/// canEvict - Return true if all interferences between VirtReg and PhysReg can
360/// be evicted. Set maxWeight to the maximal spill weight of an interference.
361bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
362 unsigned Size, float &MaxWeight) {
363 float Weight = 0;
364 for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
365 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
366 // If there is 10 or more interferences, chances are one is smaller.
367 if (Q.collectInterferingVRegs(10) >= 10)
368 return false;
369
370 // CHeck if any interfering live range is shorter than VirtReg.
371 for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
372 LiveInterval *Intf = Q.interferingVRegs()[i];
373 if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
374 return false;
375 if (Intf->getSize() <= Size)
376 return false;
377 Weight = std::max(Weight, Intf->weight);
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000378 }
379 }
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000380 MaxWeight = Weight;
381 return true;
382}
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000383
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000384/// tryEvict - Try to evict all interferences for a physreg.
385/// @param VirtReg Currently unassigned virtual register.
386/// @param Order Physregs to try.
387/// @return Physreg to assign VirtReg, or 0.
388unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
389 AllocationOrder &Order,
390 SmallVectorImpl<LiveInterval*> &NewVRegs){
391 NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);
392
393 // We can only evict interference if all interfering registers are virtual and
394 // longer than VirtReg.
395 const unsigned Size = VirtReg.getSize();
396
397 // Keep track of the lightest single interference seen so far.
398 float BestWeight = 0;
399 unsigned BestPhys = 0;
400
401 Order.rewind();
402 while (unsigned PhysReg = Order.next()) {
403 float Weight = 0;
404 if (!canEvictInterference(VirtReg, PhysReg, Size, Weight))
405 continue;
406
407 // This is an eviction candidate.
408 DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
409 << Weight << '\n');
410 if (BestPhys && Weight >= BestWeight)
411 continue;
412
413 // Best so far.
414 BestPhys = PhysReg;
415 BestWeight = Weight;
Jakob Stoklund Olesen57f1e2c2011-02-25 01:04:22 +0000416 // Stop if the hint can be used.
417 if (Order.isHint(PhysReg))
418 break;
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +0000419 }
420
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +0000421 if (!BestPhys)
422 return 0;
423
424 DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
425 for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
426 LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
427 assert(Q.seenAllInterferences() && "Didn't check all interfererences.");
428 for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
429 LiveInterval *Intf = Q.interferingVRegs()[i];
430 unassign(*Intf, VRM->getPhys(Intf->reg));
431 ++NumEvicted;
432 NewVRegs.push_back(Intf);
433 }
434 }
435 return BestPhys;
Andrew Trickb853e6c2010-12-09 18:15:21 +0000436}
437
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +0000438
439//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000440// Region Splitting
441//===----------------------------------------------------------------------===//
442
/// calcInterferenceInfo - Compute per-block outgoing and ingoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
/// NOTE(review): the interference segment iterator IntI is shared and advanced
/// monotonically across three passes over SA->LiveBlocks; the break-on-invalid
/// and rewind steps are order-critical, so the statement order below must be
/// preserved.
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  SpillConstraints.resize(SA->LiveBlocks.size());
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Number = BI.MBB->getNumber();
    // Blocks with uses prefer a register on the live-in/live-out edges.
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }

  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

    // Determine which blocks have interference live in or after the last split
    // point.
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Is the interference live-in?
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
      }

      // Is the interference overlapping the last split point?
      if (BI.LiveOut) {
        if (IntI.stop() < BI.LastSplitPoint)
          IntI.advanceTo(BI.LastSplitPoint.getPrevSlot());
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }

    // Rewind iterator and check other interferences.
    IntI.find(VirtReg.beginIndex());
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;

        if (BC.Entry != SpillPlacement::MustSpill)
          BC.Entry = SpillPlacement::PrefSpill;
        if (BC.Exit != SpillPlacement::MustSpill)
          BC.Exit = SpillPlacement::PrefSpill;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        // Not live in, but before the first use.
        if (IntI.start() < BI.FirstUse) {
          BC.Entry = SpillPlacement::PrefSpill;
          // If the block contains a kill from an earlier split, never split
          // again in the same block.
          if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Kill))
            BC.Entry = SpillPlacement::MustSpill;
        }
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop) {
            BC.Exit = SpillPlacement::PrefSpill;
            // Avoid splitting twice in the same block.
            if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Def))
              BC.Exit = SpillPlacement::MustSpill;
          }
        }
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
        << LocalCost << '\n');
  return LocalCost;
}
617
618/// calcGlobalSplitCost - Return the global split cost of following the split
619/// pattern in LiveBundles. This cost should be added to the local cost of the
620/// interference pattern in SpillConstraints.
621///
622float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
623 float GlobalCost = 0;
Jakob Stoklund Olesenf0ac26c2011-02-09 22:50:26 +0000624 for (unsigned i = 0, e = SpillConstraints.size(); i != e; ++i) {
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000625 SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
626 unsigned Inserts = 0;
627 // Broken entry preference?
628 Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
629 (BC.Entry == SpillPlacement::PrefReg);
630 // Broken exit preference?
631 Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
632 (BC.Exit == SpillPlacement::PrefReg);
633 if (Inserts)
Jakob Stoklund Olesenf0ac26c2011-02-09 22:50:26 +0000634 GlobalCost +=
635 Inserts * SpillPlacer->getBlockFrequency(SA->LiveBlocks[i].MBB);
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000636 }
637 DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
638 return GlobalCost;
639}
640
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +0000641/// splitAroundRegion - Split VirtReg around the region determined by
642/// LiveBundles. Make an effort to avoid interference from PhysReg.
643///
644/// The 'register' interval is going to contain as many uses as possible while
645/// avoiding interference. The 'stack' interval is the complement constructed by
646/// SplitEditor. It will contain the rest.
647///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  // For each live block, InterferenceRanges[i].first is the earliest
  // interfering slot and .second the latest; an invalid index means no
  // interference was seen at that end of the block.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(SA->LiveBlocks.size());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        // Back up to the last segment that starts inside the block.
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (IntI.stop() <= Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

  // First add all defs that are live out of a block.
  // (Pass 1: for each block whose exit bundle got the register, extend the
  // new interval to the end of the block while avoiding interference.)
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > Start) && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        // Value is defined in this block; enter the interval at the def.
        DEBUG(dbgs() << ", not live-through.\n");
        SE.useIntv(SE.enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.useIntv(SE.enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE.useIntv(BI.Def, Stop);
      continue;
    }


    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE.enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE.useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  // (Pass 2: for each block whose entry bundle got the register, extend the
  // new interval from the top of the block while avoiding interference.)
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        // Value dies in this block; leave the interval after the kill.
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, SE.leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE.useIntv(Start, SE.leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE.leaveIntvBefore(BI.LastSplitPoint);
        SE.useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE.overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE.useIntv(Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE.leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE.useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();
  ++NumGlobalSplits;

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}
934
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000935unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
936 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000937 BitVector LiveBundles, BestBundles;
938 float BestCost = 0;
939 unsigned BestReg = 0;
940 Order.rewind();
941 while (unsigned PhysReg = Order.next()) {
942 float Cost = calcInterferenceInfo(VirtReg, PhysReg);
943 if (BestReg && Cost >= BestCost)
944 continue;
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +0000945
946 SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
947 // No live bundles, defer to splitSingleBlocks().
948 if (!LiveBundles.any())
949 continue;
950
951 Cost += calcGlobalSplitCost(LiveBundles);
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000952 if (!BestReg || Cost < BestCost) {
953 BestReg = PhysReg;
954 BestCost = Cost;
955 BestBundles.swap(LiveBundles);
956 }
957 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +0000958
959 if (!BestReg)
960 return 0;
961
962 splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +0000963 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Region);
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +0000964 return 0;
965}
966
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +0000967
968//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +0000969// Local Splitting
970//===----------------------------------------------------------------------===//
971
972
973/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
974/// in order to use PhysReg between two entries in SA->UseSlots.
975///
976/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
977///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  // One gap between each pair of adjacent uses.
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  // Start from zero weight; each alias below can only raise gap weights.
  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
          .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    // Walk the interfering segments and the gaps in lockstep: Gap tracks the
    // first gap that may overlap the current segment IntI.
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      // Record the largest interfering weight seen in each covered gap.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}
1025
1026/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
1027/// before MI that has a slot index. If MI is the first mapped instruction in
1028/// its block, return the block start index instead.
1029///
1030SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
1031 assert(MI && "Missing MachineInstr");
1032 const MachineBasicBlock *MBB = MI->getParent();
1033 MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
1034 while (I != B)
1035 if (!(--I)->isDebugValue() && !I->isCopy())
1036 return Indexes->getInstructionIndex(I);
1037 return Indexes->getMBBStartIdx(MBB);
1038}
1039
1040/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
1041/// real non-copy instruction for each instruction in SA->UseSlots.
1042///
1043void RAGreedy::calcPrevSlots() {
1044 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1045 PrevSlot.clear();
1046 PrevSlot.reserve(Uses.size());
1047 for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
1048 const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
1049 PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
1050 }
1051}
1052
1053/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
1054/// be beneficial to split before UseSlots[i].
1055///
1056/// 0 is always a valid split point
1057unsigned RAGreedy::nextSplitPoint(unsigned i) {
1058 const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
1059 const unsigned Size = Uses.size();
1060 assert(i != Size && "No split points after the end");
1061 // Allow split before i when Uses[i] is not adjacent to the previous use.
1062 while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
1063 ;
1064 return i;
1065}
1066
1067/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
1068/// basic block.
1069///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - A phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case, we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  // Splitting won't help with two or fewer uses.
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

  // Best candidate so far: split before Uses[BestBefore] and after
  // Uses[BestAfter]. BestBefore == NumGaps means no candidate was found.
  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB);
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

    // Shrink/extend the candidate range until no more progress can be made.
    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !FirstLive, and the last instr doesn't write
        // when !LastLive.
        //
        // We will be inserting copies before and after, so the total number of
        // reads and writes is 2 * EstUses.
        //
        const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
                                 2*(LiveBefore + LiveAfter);

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than the
        // instruction it is inserted before if there are many dead indexes
        // between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                              PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps, we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        // The candidate range is now empty; reset MaxGap before extending.
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

  // Carve out the interval covering Uses[BestBefore..BestAfter].
  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  SE.openIntv();
  SlotIndex SegStart = SE.enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE.leaveIntvAfter(Uses[BestAfter]);
  SE.useIntv(SegStart, SegStop);
  SE.closeIntv();
  SE.finish();
  // Mark the new ranges so local splitting is not attempted on them again.
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
  ++NumLocalSplits;

  return 0;
}
1234
1235//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001236// Live Range Splitting
1237//===----------------------------------------------------------------------===//
1238
1239/// trySplit - Try to split VirtReg or one of its interferences, making it
1240/// assignable.
1241/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
1242unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
1243 SmallVectorImpl<LiveInterval*>&NewVRegs) {
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001244 // Local intervals are handled separately.
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001245 if (LIS->intervalIsInOneMBB(VirtReg)) {
1246 NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001247 SA->analyze(&VirtReg);
Jakob Stoklund Olesen034a80d2011-02-17 19:13:53 +00001248 return tryLocalSplit(VirtReg, Order, NewVRegs);
Jakob Stoklund Olesena2ebf602011-02-19 00:38:40 +00001249 }
1250
1251 NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001252
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001253 // Don't iterate global splitting.
1254 // Move straight to spilling if this range was produced by a global split.
1255 LiveRangeStage Stage = getStage(VirtReg);
1256 if (Stage >= RS_Block)
1257 return 0;
1258
1259 SA->analyze(&VirtReg);
1260
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001261 // First try to split around a region spanning multiple blocks.
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001262 if (Stage < RS_Region) {
1263 unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
1264 if (PhysReg || !NewVRegs.empty())
1265 return PhysReg;
1266 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001267
1268 // Then isolate blocks with multiple uses.
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001269 if (Stage < RS_Block) {
1270 SplitAnalysis::BlockPtrSet Blocks;
1271 if (SA->getMultiUseBlocks(Blocks)) {
1272 SmallVector<LiveInterval*, 4> SpillRegs;
1273 LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
1274 SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
1275 setStage(NewVRegs.begin(), NewVRegs.end(), RS_Block);
1276 if (VerifyEnabled)
1277 MF->verify(this, "After splitting live range around basic blocks");
1278 }
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001279 }
1280
1281 // Don't assign any physregs.
1282 return 0;
1283}
1284
1285
Jakob Stoklund Olesenb5fa9332011-01-18 21:13:27 +00001286//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001287// Spilling
1288//===----------------------------------------------------------------------===//
1289
1290/// calcInterferenceWeight - Calculate the combined spill weight of
1291/// interferences when assigning VirtReg to PhysReg.
1292float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg){
1293 float Sum = 0;
1294 for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
1295 LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
1296 Q.collectInterferingVRegs();
1297 if (Q.seenUnspillableVReg())
1298 return HUGE_VALF;
1299 for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
1300 Sum += Q.interferingVRegs()[i]->weight;
1301 }
1302 return Sum;
1303}
1304
1305/// trySpillInterferences - Try to spill interfering registers instead of the
1306/// current one. Only do it if the accumulated spill weight is smaller than the
1307/// current spill weight.
1308unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
1309 AllocationOrder &Order,
1310 SmallVectorImpl<LiveInterval*> &NewVRegs) {
1311 NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
1312 unsigned BestPhys = 0;
Duncan Sands2aea4902010-12-28 10:07:15 +00001313 float BestWeight = 0;
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001314
1315 Order.rewind();
1316 while (unsigned PhysReg = Order.next()) {
1317 float Weight = calcInterferenceWeight(VirtReg, PhysReg);
1318 if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
1319 continue;
1320 if (!BestPhys || Weight < BestWeight)
1321 BestPhys = PhysReg, BestWeight = Weight;
1322 }
1323
1324 // No candidates found.
1325 if (!BestPhys)
1326 return 0;
1327
1328 // Collect all interfering registers.
1329 SmallVector<LiveInterval*, 8> Spills;
1330 for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
1331 LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
1332 Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
1333 for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
1334 LiveInterval *VReg = Q.interferingVRegs()[i];
Jakob Stoklund Olesen27106382011-02-09 01:14:03 +00001335 unassign(*VReg, *AI);
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001336 }
1337 }
1338
1339 // Spill them all.
1340 DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
1341 << BestWeight << '\n');
1342 for (unsigned i = 0, e = Spills.size(); i != e; ++i)
1343 spiller().spill(Spills[i], NewVRegs, Spills);
1344 return BestPhys;
1345}
1346
1347
1348//===----------------------------------------------------------------------===//
1349// Main Entry Point
1350//===----------------------------------------------------------------------===//
1351
1352unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001353 SmallVectorImpl<LiveInterval*> &NewVRegs) {
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001354 LiveRangeStage Stage = getStage(VirtReg);
1355 if (Stage == RS_Original)
1356 LRStage[VirtReg.reg] = RS_Second;
1357
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001358 // First try assigning a free register.
Jakob Stoklund Olesendd479e92010-12-10 22:21:05 +00001359 AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
1360 while (unsigned PhysReg = Order.next()) {
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001361 if (!checkPhysRegInterference(VirtReg, PhysReg))
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001362 return PhysReg;
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001363 }
Andrew Trickb853e6c2010-12-09 18:15:21 +00001364
Jakob Stoklund Olesen98c81412011-02-23 00:29:52 +00001365 if (unsigned PhysReg = tryReassign(VirtReg, Order, NewVRegs))
1366 return PhysReg;
1367
1368 if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +00001369 return PhysReg;
Andrew Trickb853e6c2010-12-09 18:15:21 +00001370
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001371 assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");
1372
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001373 // The first time we see a live range, don't try to split or spill.
1374 // Wait until the second time, when all smaller ranges have been allocated.
1375 // This gives a better picture of the interference to split around.
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001376 if (Stage == RS_Original) {
Jakob Stoklund Olesen107d3662011-02-24 23:21:36 +00001377 NewVRegs.push_back(&VirtReg);
1378 return 0;
1379 }
1380
Jakob Stoklund Olesen22a1df62011-03-01 21:10:07 +00001381 assert(Stage < RS_Spill && "Cannot allocate after spilling");
1382
Jakob Stoklund Olesen46c83c82010-12-14 00:37:49 +00001383 // Try splitting VirtReg or interferences.
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001384 unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
1385 if (PhysReg || !NewVRegs.empty())
Jakob Stoklund Olesenb64d92e2010-12-14 00:37:44 +00001386 return PhysReg;
1387
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001388 // Try to spill another interfering reg with less spill weight.
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001389 PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
Jakob Stoklund Olesen770d42d2010-12-22 22:01:30 +00001390 if (PhysReg)
1391 return PhysReg;
1392
1393 // Finally spill VirtReg itself.
Jakob Stoklund Olesen533f58e2010-12-11 00:19:56 +00001394 NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001395 SmallVector<LiveInterval*, 1> pendingSpills;
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001396 spiller().spill(&VirtReg, NewVRegs, pendingSpills);
Jakob Stoklund Olesencba2e062010-12-08 03:26:16 +00001397
1398 // The live virtual register requesting allocation was spilled, so tell
1399 // the caller not to allocate anything during this round.
1400 return 0;
1401}
1402
/// runOnMachineFunction - Top-level driver: cache the analyses this pass
/// depends on, run the greedy allocation loop, then rewrite virtual registers
/// to the physical registers recorded in VirtRegMap.
/// Always returns true because the function's code is modified.
/// NOTE(review): the statements below are order-dependent — RegAllocBase::init
/// must run before the members it populates are used, and allocation must
/// finish before VRM->rewrite; keep the sequence intact.
bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  // Optional machine-code verification before we start rewriting anything.
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  // Initialize the base allocator and cache pointers to the analyses used
  // throughout this allocation round.
  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
  // Reset the per-virtreg stage map (consulted by selectOrSplit) and size it
  // for this function's virtual registers.
  LRStage.clear();
  LRStage.resize(MRI->getNumVirtRegs());

  // Run the main allocation loop, then fix up block live-ins and kill flags.
  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run rewriter
  {
    // Scoped so the timer stops as soon as rewriting completes.
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}