//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

#include <queue>

using namespace llvm;

STATISTIC(NumGlobalSplits, "Number of split global live ranges");
STATISTIC(NumLocalSplits,  "Number of split local live ranges");
STATISTIC(NumReassigned,   "Number of interferences reassigned");
STATISTIC(NumEvicted,      "Number of interferences evicted");

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass,
                 public RegAllocBase,
                 private LiveRangeEdit::Delegate {

  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
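  // Priority queue of (priority, virtual register number) pairs built by
  // enqueue(); dequeue() pops the pair with the largest priority first.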
  std::priority_queue<std::pair<unsigned, unsigned> > Queue;

  // Live ranges pass through a number of stages as we try to allocate them.
  // Some of the stages may also create new live ranges:
  //
  // - Region splitting.
  // - Per-block splitting.
  // - Local splitting.
  // - Spilling.
  //
  // Ranges produced by one of the stages skip the previous stages when they are
  // dequeued. This improves performance because we can skip interference checks
  // that are unlikely to give any results. It also guarantees that the live
  // range splitting algorithm terminates, something that is otherwise hard to
  // ensure.
  enum LiveRangeStage {
    RS_Original, ///< Never seen before, never split.
    RS_Second,   ///< Second time in the queue.
    RS_Region,   ///< Produced by region splitting.
    RS_Block,    ///< Produced by per-block splitting.
    RS_Local,    ///< Produced by local splitting.
    RS_Spill     ///< Produced by spilling.
  };

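  // The current stage of each virtual register, indexed by virtual register
  // number. Grown on demand from enqueue() and setStage().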
  IndexedMap<unsigned char, VirtReg2IndexFunctor> LRStage;

  LiveRangeStage getStage(const LiveInterval &VirtReg) const {
    return LiveRangeStage(LRStage[VirtReg.reg]);
  }

  template<typename Iterator>
  void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
    LRStage.resize(MRI->getNumVirtRegs());
    for (;Begin != End; ++Begin)
      LRStage[(*Begin)->reg] = NewStage;
  }

  // splitting state.
  std::auto_ptr<SplitAnalysis> SA;
  std::auto_ptr<SplitEditor> SE;

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

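  /// IndexPair - (first, last) interference slot indexes for a single block.
  /// An invalid index means no interference on that side of the block.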
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    unsigned PhysReg;
    SmallVector<IndexPair, 8> Interference;
    BitVector LiveBundles;
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;
  virtual void releaseMemory();
  virtual Spiller &spiller() { return *SpillerInstance; }
  virtual void enqueue(LiveInterval *LI);
  virtual LiveInterval *dequeue();
  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  void LRE_WillEraseInstruction(MachineInstr*);

  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);

  void mapGlobalInterference(unsigned, SmallVectorImpl<IndexPair>&);
  float calcSplitConstraints(const SmallVectorImpl<IndexPair>&);

  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);
  bool canEvictInterference(LiveInterval&, unsigned, float&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&,
                       SmallVectorImpl<LiveInterval*>&);
  unsigned tryEvict(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID), LRStage(RS_Original) {
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}


//===----------------------------------------------------------------------===//
//                     LiveRangeEdit delegate methods
//===----------------------------------------------------------------------===//

void RAGreedy::LRE_WillEraseInstruction(MachineInstr *MI) {
  // LRE itself will remove from SlotIndexes and parent basic block.
  VRM->RemoveMachineInstrFromMaps(MI);
}


void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  LRStage.clear();
  RegAllocBase::releaseMemory();
}

void RAGreedy::enqueue(LiveInterval *LI) {
  // Prioritize live ranges by size, assigning larger ranges first.
  // The queue holds (size, reg) pairs.
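  // std::priority_queue pops the largest pair first, so a larger Prio value
  // means the register is allocated earlier.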
  const unsigned Size = LI->getSize();
  const unsigned Reg = LI->reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "Can only enqueue virtual registers");
  unsigned Prio;

  LRStage.grow(Reg);
  if (LRStage[Reg] == RS_Original)
    // 1st generation ranges are handled first, long -> short.
    Prio = (1u << 31) + Size;
  else
    // Repeat offenders are handled second, short -> long.
    Prio = (1u << 30) - Size;

  // Boost ranges that have a physical register hint.
  const unsigned Hint = VRM->getRegAllocPref(Reg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint))
    Prio |= (1u << 30);

  Queue.push(std::make_pair(Prio, Reg));
}

LiveInterval *RAGreedy::dequeue() {
  if (Queue.empty())
    return 0;
  LiveInterval *LI = &LIS->getInterval(Queue.top().second);
  Queue.pop();
  return LI;
}

//===----------------------------------------------------------------------===//
//                         Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      if (Q.collectInterferingVRegs(2) > 1)
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    unassign(InterferingVReg, OldAssign);
    assign(InterferingVReg, PhysReg);
    ++NumReassigned;
    return true;
  }
  return false;
}

/// tryReassign - Try to reassign a single interference to a different physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return        Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order,
                               SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
    if (!InterferingVReg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
      continue;
    if (reassignVReg(*InterferingVReg, PhysReg))
      return PhysReg;
  }
  return 0;
}


//===----------------------------------------------------------------------===//
//                         Interference eviction
//===----------------------------------------------------------------------===//

/// canEvictInterference - Return true if all interferences between VirtReg and
/// PhysReg can be evicted. Set MaxWeight to the maximal spill weight of an
/// interference.
bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
                                    float &MaxWeight) {
  float Weight = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    // If there are 10 or more interferences, chances are one is smaller.
    if (Q.collectInterferingVRegs(10) >= 10)
      return false;

    // Check if any interfering live range is heavier than VirtReg.
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      if (TargetRegisterInfo::isPhysicalRegister(Intf->reg))
        return false;
      if (Intf->weight >= VirtReg.weight)
        return false;
      Weight = std::max(Weight, Intf->weight);
    }
  }
  MaxWeight = Weight;
  return true;
}

/// tryEvict - Try to evict all interferences for a physreg.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return        Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
                            AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Evict", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = 0;
    if (!canEvictInterference(VirtReg, PhysReg, Weight))
      continue;

    // This is an eviction candidate.
    DEBUG(dbgs() << "max " << PrintReg(PhysReg, TRI) << " interference = "
                 << Weight << '\n');
    if (BestPhys && Weight >= BestWeight)
      continue;

    // Best so far.
    BestPhys = PhysReg;
    BestWeight = Weight;
    // Stop if the hint can be used.
    if (Order.isHint(PhysReg))
      break;
  }

  if (!BestPhys)
    return 0;

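  // Evict every virtual register currently assigned to BestPhys or one of its
  // aliases, and put them back on the queue via NewVRegs.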
  DEBUG(dbgs() << "evicting " << PrintReg(BestPhys, TRI) << " interference\n");
  for (const unsigned *AliasI = TRI->getOverlaps(BestPhys); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    assert(Q.seenAllInterferences() && "Didn't check all interferences.");
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *Intf = Q.interferingVRegs()[i];
      unassign(*Intf, VRM->getPhys(Intf->reg));
      ++NumEvicted;
      NewVRegs.push_back(Intf);
    }
  }
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Region Splitting
//===----------------------------------------------------------------------===//

/// mapGlobalInterference - Compute a map of the interference from PhysReg and
/// its aliases in each block in SA->LiveBlocks.
/// If LiveBlocks[i] is live-in, Ranges[i].first is the first interference.
/// If LiveBlocks[i] is live-out, Ranges[i].second is the last interference.
void RAGreedy::mapGlobalInterference(unsigned PhysReg,
                                     SmallVectorImpl<IndexPair> &Ranges) {
  Ranges.assign(SA->LiveBlocks.size(), IndexPair());
  LiveInterval &VirtReg = const_cast<LiveInterval&>(SA->getParent());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = Ranges[i];

      // Skip interference-free blocks.
      if (IntI.start() >= BI.Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(BI.Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= BI.Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(BI.Stop);
        if (!IntI.valid() || IntI.start() >= BI.Stop)
          --IntI;
        if (IntI.stop() <= BI.Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }
}

/// calcSplitConstraints - Fill out the SplitConstraints vector based on the
/// interference pattern in Intf. Return the static cost of this split,
/// assuming that all preferences in SplitConstraints are met.
float RAGreedy::calcSplitConstraints(const SmallVectorImpl<IndexPair> &Intf) {
  // Reset interference dependent info.
  SplitConstraints.resize(SA->LiveBlocks.size());
  float StaticCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    IndexPair IP = Intf[i];

    BC.Number = BI.MBB->getNumber();
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;

    // Number of spill code instructions to insert.
    unsigned Ins = 0;

    // Interference for the live-in value.
    if (IP.first.isValid()) {
      if (IP.first <= BI.Start)
        BC.Entry = SpillPlacement::MustSpill, Ins += BI.Uses;
      else if (!BI.Uses)
        BC.Entry = SpillPlacement::PrefSpill;
      else if (IP.first < BI.FirstUse)
        BC.Entry = SpillPlacement::PrefSpill, ++Ins;
      else if (IP.first < (BI.LiveThrough ? BI.LastUse : BI.Kill))
        ++Ins;
    }

    // Interference for the live-out value.
    if (IP.second.isValid()) {
      if (IP.second >= BI.LastSplitPoint)
        BC.Exit = SpillPlacement::MustSpill, Ins += BI.Uses;
      else if (!BI.Uses)
        BC.Exit = SpillPlacement::PrefSpill;
      else if (IP.second > BI.LastUse)
        BC.Exit = SpillPlacement::PrefSpill, ++Ins;
      else if (IP.second > (BI.LiveThrough ? BI.FirstUse : BI.Def))
        ++Ins;
    }

    // Accumulate the total frequency of inserted spill code.
    if (Ins)
      StaticCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  return StaticCost;
}


/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SplitConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SplitConstraints[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BC.Number, 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BC.Number, 1)];
    unsigned Ins = 0;

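    // Count a spill or reload whenever the value changes sides across the
    // block (no uses), or whenever the bundle assignment disagrees with the
    // side the block itself prefers (BC.Entry/BC.Exit).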
    if (!BI.Uses)
      Ins += RegIn != RegOut;
    else {
      if (BI.LiveIn)
        Ins += RegIn != (BC.Entry == SpillPlacement::PrefReg);
      if (BI.LiveOut)
        Ins += RegOut != (BC.Exit == SpillPlacement::PrefReg);
    }
    if (Ins)
      GlobalCost += Ins * SpillPlacer->getBlockFrequency(BC.Number);
  }
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  SmallVector<IndexPair, 8> InterferenceRanges;
  mapGlobalInterference(PhysReg, InterferenceRanges);

  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  // Create the main cross-block interval.
  SE->openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < BI.Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > BI.Start)
           && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE->enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE->useIntv(SE->enterIntvBefore(BI.Def), BI.Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE->useIntv(SE->enterIntvBefore(BI.FirstUse), BI.Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE->useIntv(BI.Def, BI.Stop);
      continue;
    }


    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE->enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE->useIntv(SegStart, BI.Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE->enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE->useIntv(BI.Start, BI.Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE->leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE->useIntv(BI.Start, SE->leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE->useIntv(BI.Start, SE->leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE->leaveIntvBefore(BI.LastSplitPoint);
        SE->useIntv(BI.Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE->overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE->useIntv(BI.Start, BI.Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE->useIntv(BI.Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE->leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE->useIntv(BI.Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE->leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE->closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE->finish();
  ++NumGlobalSplits;

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;

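  // Try each PhysReg in the allocation order: compute its interference
  // pattern, let SpillPlacer choose the live bundles, and keep the candidate
  // with the lowest combined static and global split cost.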
  Order.rewind();
  for (unsigned Cand = 0; unsigned PhysReg = Order.next(); ++Cand) {
    if (GlobalCand.size() <= Cand)
      GlobalCand.resize(Cand+1);
    GlobalCand[Cand].PhysReg = PhysReg;

    mapGlobalInterference(PhysReg, GlobalCand[Cand].Interference);
    float Cost = calcSplitConstraints(GlobalCand[Cand].Interference);
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << "\tstatic = " << Cost);
    if (BestReg && Cost >= BestCost) {
      DEBUG(dbgs() << " higher.\n");
      continue;
    }

    SpillPlacer->placeSpills(SplitConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any()) {
      DEBUG(dbgs() << " no bundles.\n");
      continue;
    }

    Cost += calcGlobalSplitCost(LiveBundles);
    DEBUG({
      dbgs() << ", total = " << Cost << " with bundles";
      for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
        dbgs() << " EB#" << i;
      dbgs() << ".\n";
    });
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = 0.98f * Cost; // Prevent rounding effects.
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Region);
  return 0;
}


//===----------------------------------------------------------------------===//
//                             Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point
unsigned RAGreedy::nextSplitPoint(unsigned i) {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned Size = Uses.size();
  assert(i != Size && "No split points after the end");
  // Allow split before i when Uses[i] is not adjacent to the previous use.
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}

/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
/// basic block.
///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - A phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case, we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

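  // Best split candidate so far: split before Uses[BestBefore] and after
  // Uses[BestAfter]. BestDiff is the margin by which the estimated new spill
  // weight exceeds the interference that would have to be evicted.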
  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB->getNumber());
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

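    // Grow or shrink the [SplitBefore, SplitAfter] window until no further
    // candidate can be formed for this PhysReg.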
    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !LiveBefore, and the last instr doesn't
        // write when !LiveAfter.
1075 //
1076 // We will be inserting copies before and after, so the total number of
1077 // reads and writes is 2 * EstUses.
1078 //
1079 const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
1080 2*(LiveBefore + LiveAfter);

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than
        // that of the instruction it is inserted before if there are many
        // dead indexes between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                              PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
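        // Interpretation (not a comment from the original source): EstWeight
        // approximates the spill weight the candidate interval would get,
        // i.e. block frequency times the estimated use count, normalized by
        // the estimated interval size in slot index units.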
        // Would this split be possible to allocate?
        // Never allocate all gaps; we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

  LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
  SE->reset(LREdit);

  SE->openIntv();
  SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
  SE->useIntv(SegStart, SegStop);
  SE->closeIntv();
  SE->finish();
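  // Interpretation (not a comment from the original source): the split editor
  // has now rewritten VirtReg as new virtual registers collected in NewVRegs:
  // one covering Uses[BestBefore..BestAfter], plus one or more covering the
  // remainder of the original range.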
  setStage(NewVRegs.begin(), NewVRegs.end(), RS_Local);
  ++NumLocalSplits;

  return 0;
}

//===----------------------------------------------------------------------===//
//                            Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return PhysReg when VirtReg may be assigned and/or new live ranges in
/// NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg)) {
    NamedRegionTimer T("Local Splitting", TimerGroupName, TimePassesIsEnabled);
    SA->analyze(&VirtReg);
    return tryLocalSplit(VirtReg, Order, NewVRegs);
  }

  NamedRegionTimer T("Global Splitting", TimerGroupName, TimePassesIsEnabled);

  // Don't iterate global splitting.
  // Move straight to spilling if this range was produced by a global split.
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage >= RS_Block)
    return 0;
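  // Note (not a comment from the original source): ranges at stage RS_Block
  // or beyond return 0 here with no new registers, so selectOrSplit falls
  // through to the spiller rather than splitting again.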

  SA->analyze(&VirtReg);

  // First try to split around a region spanning multiple blocks.
  if (Stage < RS_Region) {
    unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
    if (PhysReg || !NewVRegs.empty())
      return PhysReg;
  }

  // Then isolate blocks with multiple uses.
  if (Stage < RS_Block) {
    SplitAnalysis::BlockPtrSet Blocks;
    if (SA->getMultiUseBlocks(Blocks)) {
      LiveRangeEdit LREdit(VirtReg, NewVRegs, this);
      SE->reset(LREdit);
      SE->splitSingleBlocks(Blocks);
      setStage(NewVRegs.begin(), NewVRegs.end(), RS_Block);
      if (VerifyEnabled)
        MF->verify(this, "After splitting live range around basic blocks");
    }
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Main Entry Point
//===----------------------------------------------------------------------===//

unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
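  // Summary (not a comment from the original source): allocation escalates in
  // stages below: try a free register, then reassignment, then eviction;
  // defer a range the first time it is seen; then try splitting; finally
  // spill.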
  LiveRangeStage Stage = getStage(VirtReg);
  if (Stage == RS_Original)
    LRStage[VirtReg.reg] = RS_Second;

  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  if (unsigned PhysReg = tryReassign(VirtReg, Order, NewVRegs))
    return PhysReg;

  if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // The first time we see a live range, don't try to split or spill.
  // Wait until the second time, when all smaller ranges have been allocated.
  // This gives a better picture of the interference to split around.
  if (Stage == RS_Original) {
    NewVRegs.push_back(&VirtReg);
    return 0;
  }
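  // Illustrative behavior (not a comment from the original source): on the
  // first call for a register the range is pushed onto NewVRegs and 0 is
  // returned, which requeues it; by the second call its stage is RS_Second,
  // so splitting and spilling become possible.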

  assert(Stage < RS_Spill && "Cannot allocate after spilling");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  spiller().spill(&VirtReg, NewVRegs, 0);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*VRM, *LIS, *Loops));
  SE.reset(new SplitEditor(*SA, *LIS, *VRM, *DomTree));
  LRStage.clear();
  LRStage.resize(MRI->getNumVirtRegs());

  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run the rewriter.
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    VRM->rewrite(Indexes);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}