//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumReassigned,   "Number of interferences reassigned");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::auto_ptr<SplitAnalysis> SA;
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;
  IndexedMap<unsigned, VirtReg2IndexFunctor> Generation;

  // splitting state.

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, unsigned, float&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&,
                       SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID) {
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  Generation.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
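  // The priority word acts as a max-heap key: bit 31 marks ranges seen for the
  // first time, bit 30 marks ranges with a physical register hint, and the low
  // bits hold the range size, so new and hinted ranges are dequeued before
  // plain ones, and larger ranges before smaller ones.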
  unsigned Size = LI->getSize();
  unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");

  // Boost ranges that have a physical register hint.
  unsigned Hint = VRM->getRegAllocPref(Reg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint))
    Size |= (1u << 30);

  // Boost ranges that we see for the first time.
  Generation.grow(Reg);
  if (++Generation[Reg] == 1)
    Size |= (1u << 31);

  Queue.push(std::make_pair(Size, Reg));
}

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}

//===----------------------------------------------------------------------===//
//                         Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      if (Q.collectInterferingVRegs(2) > 1)
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    unassign(InterferingVReg, OldAssign);
    assign(InterferingVReg, PhysReg);
    ++NumReassigned;
    return true;
  }
  return false;
}

/// tryReassign - Try to reassign a single interference to a different physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order,
                               SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
    if (!InterferingVReg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
      continue;
    if (reassignVReg(*InterferingVReg, PhysReg))
      return PhysReg;
  }
  return 0;
}


//===----------------------------------------------------------------------===//
//                         Interference eviction
//===----------------------------------------------------------------------===//

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted. Set MaxWeight to the maximal spill weight of an
/// interference.
bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
                                    unsigned Size, float &MaxWeight) {
  float Weight = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is smaller.
    if (Q.collectInterferingVRegs(10) >= 10)
      return false;

    // Check if any interfering live range is shorter than VirtReg.
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->getSize() <= Size)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // We can only evict interference if all interfering registers are virtual and
  // longer than VirtReg.
  const unsigned Size = VirtReg.getSize();

  // Keep track of the lightest single interference seen so far.
  float BestWeight = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = 0;
    if (!canEvictInterference(VirtReg, PhysReg, Size, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
  }

  if (!BestPhys)
    return 0;

  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Region Splitting
//===----------------------------------------------------------------------===//

/// calcInterferenceInfo - Compute per-block outgoing and ingoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
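/// Each BlockConstraint records a border preference: PrefReg when the block
/// boundary wants the value in a register, PrefSpill when it prefers it on the
/// stack, MustSpill when interference forces it onto the stack, and DontCare
/// when the boundary has no preference.
///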
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  SpillConstraints.resize(SA->LiveBlocks.size());
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Number = BI.MBB->getNumber();
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }

  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

    // Determine which blocks have interference live in or after the last split
    // point.
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Is the interference live-in?
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
      }

      // Is the interference overlapping the last split point?
      if (BI.LiveOut) {
        if (IntI.stop() < BI.LastSplitPoint)
          IntI.advanceTo(BI.LastSplitPoint.getPrevSlot());
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }

    // Rewind iterator and check other interferences.
    IntI.find(VirtReg.beginIndex());
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;

        if (BC.Entry != SpillPlacement::MustSpill)
          BC.Entry = SpillPlacement::PrefSpill;
        if (BC.Exit != SpillPlacement::MustSpill)
          BC.Exit = SpillPlacement::PrefSpill;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        // Not live in, but before the first use.
        if (IntI.start() < BI.FirstUse) {
          BC.Entry = SpillPlacement::PrefSpill;
          // If the block contains a kill from an earlier split, never split
          // again in the same block.
          if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Kill))
            BC.Entry = SpillPlacement::MustSpill;
        }
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop) {
            BC.Exit = SpillPlacement::PrefSpill;
            // Avoid splitting twice in the same block.
            if (!BI.LiveThrough && !SA->isOriginalEndpoint(BI.Def))
              BC.Exit = SpillPlacement::MustSpill;
          }
        }
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
               << LocalCost << '\n');
  return LocalCost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SpillConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  for (unsigned i = 0, e = SpillConstraints.size(); i != e; ++i) {
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;
    // Broken entry preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
               (BC.Entry == SpillPlacement::PrefReg);
    // Broken exit preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
               (BC.Exit == SpillPlacement::PrefReg);
    if (Inserts)
      GlobalCost +=
        Inserts * SpillPlacer->getBlockFrequency(SA->LiveBlocks[i].MBB);
  }
  DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(SA->LiveBlocks.size());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (IntI.stop() <= Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > Start) && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE.useIntv(SE.enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.useIntv(SE.enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE.useIntv(BI.Def, Stop);
      continue;
    }


    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE.enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE.useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, SE.leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE.useIntv(Start, SE.leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE.leaveIntvBefore(BI.LastSplitPoint);
        SE.useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE.overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE.useIntv(Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE.leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE.useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();
  ++NumGlobalSplits;

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Cost = calcInterferenceInfo(VirtReg, PhysReg);
    if (BestReg && Cost >= BestCost)
      continue;

    SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any())
      continue;

    Cost += calcGlobalSplitCost(LiveBundles);
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  return 0;
}


//===----------------------------------------------------------------------===//
//                             Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point
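/// The returned index may equal Uses.size() when there is no later split point.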
unsigned RAGreedy::nextSplitPoint(unsigned i) {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned Size = Uses.size();
  assert(i != Size && "No split points after the end");
  // Allow split before i when Uses[i] is not adjacent to the previous use.
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}

/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
/// basic block.
///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - A phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case, we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB);
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !FirstLive, and the last instr doesn't write
        // when !LastLive.
        //
        // We will be inserting copies before and after, so the total number of
        // reads and writes is 2 * EstUses.
        //
        const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
                                 2*(LiveBefore + LiveAfter);

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than the
        // instruction it is inserted before if there are many dead indexes
        // between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                              PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps, we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  SE.openIntv();
  SlotIndex SegStart = SE.enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE.leaveIntvAfter(Uses[BestAfter]);
  SE.useIntv(SegStart, SegStop);
  SE.closeIntv();
  SE.finish();
  ++NumLocalSplits;

  return 0;
}
1191
1192//===----------------------------------------------------------------------===//
Jakob Stoklund Olesenccdb3fc2011-01-19 22:11:48 +00001193// Live Range Splitting
1194//===----------------------------------------------------------------------===//
1195
1196/// trySplit - Try to split VirtReg or one of its interferences, making it
1197/// assignable.
1198/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*>&NewVRegs) {
  SA->analyze(&VirtReg);

  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // First try to split around a region spanning multiple blocks.
  unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Then isolate blocks with multiple uses.
  SplitAnalysis::BlockPtrSet Blocks;
  if (SA->getMultiUseBlocks(Blocks)) {
    SmallVector<LiveInterval*, 4> SpillRegs;
    LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
    SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
    if (VerifyEnabled)
      MF->verify(this, "After splitting live range around basic blocks");
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                                 Spilling
//===----------------------------------------------------------------------===//

/// calcInterferenceWeight - Calculate the combined spill weight of
/// interferences when assigning VirtReg to PhysReg.
float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg){
  float Sum = 0;
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Q.collectInterferingVRegs();
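    // An unspillable interference makes PhysReg useless for this heuristic,
    // so report an infinite weight.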
    if (Q.seenUnspillableVReg())
      return HUGE_VALF;
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
      Sum += Q.interferingVRegs()[i]->weight;
  }
  return Sum;
}

/// trySpillInterferences - Try to spill interfering registers instead of the
/// current one. Only do it if the accumulated spill weight is smaller than the
/// current spill weight.
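///
/// Note (added for clarity, not in the original source): a physreg only
/// qualifies when the combined weight of its interferences is strictly less
/// than VirtReg's own spill weight, so spilling them is expected to be
/// cheaper than spilling VirtReg.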
unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
                                         AllocationOrder &Order,
                                     SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
  unsigned BestPhys = 0;
  float BestWeight = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = calcInterferenceWeight(VirtReg, PhysReg);
    if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
      continue;
    if (!BestPhys || Weight < BestWeight)
      BestPhys = PhysReg, BestWeight = Weight;
  }

  // No candidates found.
  if (!BestPhys)
    return 0;

  // Collect all interfering registers.
  SmallVector<LiveInterval*, 8> Spills;
  for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *VReg = Q.interferingVRegs()[i];
      unassign(*VReg, *AI);
    }
  }
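
  // Note (added comment, not in the original source): unassigning the
  // interferences above removes them from the live interval unions, so
  // BestPhys becomes available for VirtReg once the evicted ranges have been
  // respilled as new virtual registers.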

  // Spill them all.
  DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
               << BestWeight << '\n');
  for (unsigned i = 0, e = Spills.size(); i != e; ++i)
    spiller().spill(Spills[i], NewVRegs, Spills);
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                              Main Entry Point
//===----------------------------------------------------------------------===//

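/// Note (summary comment added for clarity, not in the original source):
/// selectOrSplit tries the cheapest remedies first: assign a free physreg,
/// reassign or evict interfering live ranges, split VirtReg or its
/// interferences, spill the interferences, and finally spill VirtReg itself.
/// Returning 0 with new live ranges in NewVRegs tells the caller to requeue
/// them instead of assigning VirtReg this round.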
unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryReassign(VirtReg, Order, NewVRegs))
    return PhysReg;

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Try to spill another interfering reg with less spill weight.
  PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
  if (PhysReg)
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));

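  // Note (added comment, not in the original source): allocatePhysRegs() is
  // the RegAllocBase driver loop; it pulls live intervals off the work queue
  // and calls selectOrSplit() above, enqueueing any new virtual registers
  // returned in NewVRegs, until everything is assigned or spilled.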
  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}