//===-- SpillPlacement.cpp - Optimal Spill Code Placement -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the spill code placement analysis.
//
// Each edge bundle corresponds to a node in a Hopfield network. Constraints on
// basic blocks are weighted by the block frequency and added to become the node
// bias.
//
// Transparent basic blocks have the variable live through, but don't care if it
// is spilled or in a register. These blocks become connections in the Hopfield
// network, again weighted by block frequency.
//
// The Hopfield network minimizes (possibly locally) its energy function:
//
//   E = -sum_n V_n * ( B_n + sum_{n, m linked by b} V_m * F_b )
//
// The energy function represents the expected spill code execution frequency,
// or the cost of spilling. This is a Lyapunov function which never increases
// when a node is updated. It is guaranteed to converge to a local minimum.
//
//===----------------------------------------------------------------------===//
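
// For illustration only (made-up numbers, not taken from any real function):
// take two bundles, node 0 with bias B_0 = +0.5 and node 1 with bias
// B_1 = -0.25, linked through one transparent block with weight F_b = 0.75.
// With both values at +1 (in a register),
// E = -((0.5 + 0.75) + (-0.25 + 0.75)) = -1.75; spilling only node 1 gives
// E = +0.75, and spilling both gives E = -1.25. The consistent all-register
// assignment has the lowest energy, so the network keeps the variable in a
// register across the transparent block instead of inserting spill code there.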

#define DEBUG_TYPE "spillplacement"
#include "SpillPlacement.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"

using namespace llvm;

char SpillPlacement::ID = 0;
INITIALIZE_PASS_BEGIN(SpillPlacement, "spill-code-placement",
                      "Spill Code Placement Analysis", true, true)
INITIALIZE_PASS_DEPENDENCY(EdgeBundles)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfo)
INITIALIZE_PASS_END(SpillPlacement, "spill-code-placement",
                    "Spill Code Placement Analysis", true, true)

char &llvm::SpillPlacementID = SpillPlacement::ID;

void SpillPlacement::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<MachineBlockFrequencyInfo>();
  AU.addRequiredTransitive<EdgeBundles>();
  AU.addRequiredTransitive<MachineLoopInfo>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

/// Node - Each edge bundle corresponds to a Hopfield node.
///
/// The node contains precomputed frequency data that only depends on the CFG,
/// but Bias and Links are recomputed for every spill placement query.
///
/// The node Value is positive when the variable should be in a register. The
/// value can change when linked nodes change, but convergence is very fast
/// because all weights are positive.
///
struct SpillPlacement::Node {
  /// Scale - Inverse block frequency feeding into[0] or out of[1] the bundle.
  /// Ideally, these two numbers should be identical, but inaccuracies in the
  /// block frequency estimates mean that we need to normalize ingoing and
  /// outgoing frequencies separately so they are commensurate.
  float Scale[2];

  /// Bias - Normalized contributions from non-transparent blocks.
  /// A bundle connected to a MustSpill block has a huge negative bias,
  /// otherwise it is a number in the range [-2;2].
  float Bias;

  /// Value - Output value of this node computed from the Bias and links.
  /// This is always in the range [-1;1]. A positive number means the variable
  /// should go in a register through this bundle.
  float Value;

  typedef SmallVector<std::pair<float, unsigned>, 4> LinkVector;

  /// Links - (Weight, BundleNo) for all transparent blocks connecting to other
  /// bundles. The weights are all positive and add up to at most 2; weights
  /// from ingoing and outgoing nodes separately add up to at most 1. The
  /// weight sum can be less than 2 when the variable is not live into / out of
  /// some connected basic blocks.
  LinkVector Links;

  /// preferReg - Return true when this node prefers to be in a register.
  bool preferReg() const {
    // Undecided nodes (Value==0) go on the stack.
    return Value > 0;
  }

  /// mustSpill - Return true if this node is so biased that it must spill.
  bool mustSpill() const {
    // Actually, we must spill if Bias < -sum(weights).
    // It may be worth it to compute the weight sum here?
    return Bias < -2.0f;
  }

  /// Node - Create a blank Node.
  Node() {
    Scale[0] = Scale[1] = 0;
  }

  /// clear - Reset per-query data, but preserve frequencies that only depend
  /// on the CFG.
  void clear() {
    Bias = Value = 0;
    Links.clear();
  }

  /// addLink - Add a link to bundle b with weight w.
  /// out=0 for an ingoing link, and 1 for an outgoing link.
  void addLink(unsigned b, float w, bool out) {
    // Normalize w relative to all connected blocks from that direction.
    w *= Scale[out];

    // There can be multiple links to the same bundle, add them up.
    for (LinkVector::iterator I = Links.begin(), E = Links.end(); I != E; ++I)
      if (I->second == b) {
        I->first += w;
        return;
      }
    // This must be the first link to b.
    Links.push_back(std::make_pair(w, b));
  }

  /// addBias - Bias this node from an ingoing[0] or outgoing[1] link.
  void addBias(float w, bool out) {
    // Normalize w relative to all connected blocks from that direction.
    w *= Scale[out];
    Bias += w;
  }

  /// update - Recompute Value from Bias and Links. Return true when node
  /// preference changes.
  bool update(const Node nodes[]) {
    // Compute the weighted sum of inputs.
    float Sum = Bias;
    for (LinkVector::iterator I = Links.begin(), E = Links.end(); I != E; ++I)
      Sum += I->first * nodes[I->second].Value;

    // The weighted sum is going to be in the range [-2;2]. Ideally, we should
    // simply set Value = sign(Sum), but we will add a dead zone around 0 for
    // two reasons:
    // 1. It avoids arbitrary bias when all links are 0 as is possible during
    //    initial iterations.
    // 2. It helps tame rounding errors when the links nominally sum to 0.
    const float Thres = 1e-4f;
    bool Before = preferReg();
    if (Sum < -Thres)
      Value = -1;
    else if (Sum > Thres)
      Value = 1;
    else
      Value = 0;
    return Before != preferReg();
  }
};
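
// Illustrative example of the dead zone in Node::update() (made-up numbers):
// with Bias = 0.05 and a single link of weight 0.5, a neighbor that is still
// undecided (Value = 0) yields Sum = 0.05 > Thres, so the node votes for a
// register. If that neighbor later settles on the stack (Value = -1), then
// Sum = 0.05 - 0.5 = -0.45 < -Thres and the node flips to -1. With Bias = 0
// and only undecided neighbors, Sum stays inside the dead zone, Value remains
// 0, and preferReg() sends the bundle to the stack.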

bool SpillPlacement::runOnMachineFunction(MachineFunction &mf) {
  MF = &mf;
  bundles = &getAnalysis<EdgeBundles>();
  loops = &getAnalysis<MachineLoopInfo>();

  assert(!nodes && "Leaking node array");
  nodes = new Node[bundles->getNumBundles()];

  // Compute total ingoing and outgoing block frequencies for all bundles.
  BlockFrequency.resize(mf.getNumBlockIDs());
  MachineBlockFrequencyInfo &MBFI = getAnalysis<MachineBlockFrequencyInfo>();
  float EntryFreq = BlockFrequency::getEntryFrequency();
  for (MachineFunction::iterator I = mf.begin(), E = mf.end(); I != E; ++I) {
    float Freq = MBFI.getBlockFreq(I).getFrequency() / EntryFreq;
    unsigned Num = I->getNumber();
    BlockFrequency[Num] = Freq;
    nodes[bundles->getBundle(Num, 1)].Scale[0] += Freq;
    nodes[bundles->getBundle(Num, 0)].Scale[1] += Freq;
  }

  // Scales are reciprocal frequencies.
  for (unsigned i = 0, e = bundles->getNumBundles(); i != e; ++i)
    for (unsigned d = 0; d != 2; ++d)
      if (nodes[i].Scale[d] > 0)
        nodes[i].Scale[d] = 1 / nodes[i].Scale[d];

  // We never change the function.
  return false;
}
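
// Illustrative example of the normalization above (made-up numbers): if the
// only blocks entering bundle n have relative frequencies 1.0 and 3.0, then
// Scale[0] = 1 / (1.0 + 3.0) = 0.25. A later addBias() or addLink() call for
// the hot block contributes 3.0 * 0.25 = 0.75 and the cold block contributes
// 0.25, so the normalized ingoing contributions sum to exactly 1, matching
// the invariant described for Links above.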

void SpillPlacement::releaseMemory() {
  delete[] nodes;
  nodes = 0;
}

/// activate - mark node n as active if it wasn't already.
void SpillPlacement::activate(unsigned n) {
  if (ActiveNodes->test(n))
    return;
  ActiveNodes->set(n);
  nodes[n].clear();

  // Very large bundles usually come from big switches, indirect branches,
  // landing pads, or loops with many 'continue' statements. It is difficult to
  // allocate registers when so many different blocks are involved.
  //
  // Give a small negative bias to large bundles such that 1/32 of the
  // connected blocks need to be interested before we consider expanding the
  // region through the bundle. This helps compile time by limiting the number
  // of blocks visited and the number of links in the Hopfield network.
  if (bundles->getBlocks(n).size() > 100)
    nodes[n].Bias = -0.0625f;
}
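
// Note (derived from the comments above): the combined inputs to a node lie
// in [-2;2] (see Node::update), so a bias of -0.0625 is 2.0/32, i.e. roughly
// 1/32 of the connected blocks, weighted by frequency, must prefer a register
// before this bundle can turn positive.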


/// addConstraints - Compute node biases and weights from a set of constraints.
/// Set a bit in ActiveNodes for each active node.
void SpillPlacement::addConstraints(ArrayRef<BlockConstraint> LiveBlocks) {
  for (ArrayRef<BlockConstraint>::iterator I = LiveBlocks.begin(),
       E = LiveBlocks.end(); I != E; ++I) {
    float Freq = getBlockFrequency(I->Number);
    const float Bias[] = {
      0,           // DontCare,
      1,           // PrefReg,
      -1,          // PrefSpill
      0,           // PrefBoth
      -HUGE_VALF   // MustSpill
    };

    // Live-in to block?
    if (I->Entry != DontCare) {
      unsigned ib = bundles->getBundle(I->Number, 0);
      activate(ib);
      nodes[ib].addBias(Freq * Bias[I->Entry], 1);
    }

    // Live-out from block?
    if (I->Exit != DontCare) {
      unsigned ob = bundles->getBundle(I->Number, 1);
      activate(ob);
      nodes[ob].addBias(Freq * Bias[I->Exit], 0);
    }
  }
}
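
// Illustrative sketch only (not part of this pass): a client such as the
// greedy register allocator could describe a block where the value should
// enter in a register but leave on the stack, and hand it to the solver.
// MBB, RegBundles, and SpillPlacer are assumed to exist in the client:
//
//   SpillPlacement::BlockConstraint BC;
//   BC.Number = MBB->getNumber();
//   BC.Entry = SpillPlacement::PrefReg;
//   BC.Exit = SpillPlacement::PrefSpill;
//   SpillPlacer->prepare(RegBundles);
//   SpillPlacer->addConstraints(BC);  // single element converts to ArrayRef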

/// addPrefSpill - Same as addConstraints(PrefSpill)
void SpillPlacement::addPrefSpill(ArrayRef<unsigned> Blocks, bool Strong) {
  for (ArrayRef<unsigned>::iterator I = Blocks.begin(), E = Blocks.end();
       I != E; ++I) {
    float Freq = getBlockFrequency(*I);
    if (Strong)
      Freq += Freq;
    unsigned ib = bundles->getBundle(*I, 0);
    unsigned ob = bundles->getBundle(*I, 1);
    activate(ib);
    activate(ob);
    nodes[ib].addBias(-Freq, 1);
    nodes[ob].addBias(-Freq, 0);
  }
}

void SpillPlacement::addLinks(ArrayRef<unsigned> Links) {
  for (ArrayRef<unsigned>::iterator I = Links.begin(), E = Links.end(); I != E;
       ++I) {
    unsigned Number = *I;
    unsigned ib = bundles->getBundle(Number, 0);
    unsigned ob = bundles->getBundle(Number, 1);

    // Ignore self-loops.
    if (ib == ob)
      continue;
    activate(ib);
    activate(ob);
    if (nodes[ib].Links.empty() && !nodes[ib].mustSpill())
      Linked.push_back(ib);
    if (nodes[ob].Links.empty() && !nodes[ob].mustSpill())
      Linked.push_back(ob);
    float Freq = getBlockFrequency(Number);
    nodes[ib].addLink(ob, Freq, 1);
    nodes[ob].addLink(ib, Freq, 0);
  }
}

bool SpillPlacement::scanActiveBundles() {
  Linked.clear();
  RecentPositive.clear();
  for (int n = ActiveNodes->find_first(); n>=0; n = ActiveNodes->find_next(n)) {
    nodes[n].update(nodes);
    // A node that must spill, or a node without any links is not going to
    // change its value ever again, so exclude it from iterations.
    if (nodes[n].mustSpill())
      continue;
    if (!nodes[n].Links.empty())
      Linked.push_back(n);
    if (nodes[n].preferReg())
      RecentPositive.push_back(n);
  }
  return !RecentPositive.empty();
}

/// iterate - Repeatedly update the Hopfield nodes until stability or the
/// maximum number of iterations is reached. The member vector Linked holds
/// the numbers of the linked nodes that need updating.
void SpillPlacement::iterate() {
  // First update the recently positive nodes. They have likely received new
  // negative bias that will turn them off.
  while (!RecentPositive.empty())
    nodes[RecentPositive.pop_back_val()].update(nodes);

  if (Linked.empty())
    return;

  // Run up to 10 iterations. The edge bundle numbering is closely related to
  // basic block numbering, so there is a strong tendency towards chains of
  // linked nodes with sequential numbers. By scanning the linked nodes
  // backwards and forwards, we make it very likely that a single node can
  // affect the entire network in a single iteration. That means very fast
  // convergence, usually in a single iteration.
  for (unsigned iteration = 0; iteration != 10; ++iteration) {
    // Scan backwards, skipping the last node which was just updated.
    bool Changed = false;
    for (SmallVectorImpl<unsigned>::const_reverse_iterator I =
           llvm::next(Linked.rbegin()), E = Linked.rend(); I != E; ++I) {
      unsigned n = *I;
      if (nodes[n].update(nodes)) {
        Changed = true;
        if (nodes[n].preferReg())
          RecentPositive.push_back(n);
      }
    }
    if (!Changed || !RecentPositive.empty())
      return;

    // Scan forwards, skipping the first node which was just updated.
    Changed = false;
    for (SmallVectorImpl<unsigned>::const_iterator I =
           llvm::next(Linked.begin()), E = Linked.end(); I != E; ++I) {
      unsigned n = *I;
      if (nodes[n].update(nodes)) {
        Changed = true;
        if (nodes[n].preferReg())
          RecentPositive.push_back(n);
      }
    }
    if (!Changed || !RecentPositive.empty())
      return;
  }
}

void SpillPlacement::prepare(BitVector &RegBundles) {
  Linked.clear();
  RecentPositive.clear();
  // Reuse RegBundles as our ActiveNodes vector.
  ActiveNodes = &RegBundles;
  ActiveNodes->clear();
  ActiveNodes->resize(bundles->getNumBundles());
}

bool
SpillPlacement::finish() {
  assert(ActiveNodes && "Call prepare() first");

  // Write preferences back to ActiveNodes.
  bool Perfect = true;
  for (int n = ActiveNodes->find_first(); n>=0; n = ActiveNodes->find_next(n))
    if (!nodes[n].preferReg()) {
      ActiveNodes->reset(n);
      Perfect = false;
    }
  ActiveNodes = 0;
  return Perfect;
}
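
// Illustrative driver sketch (assumed client code, not part of this pass):
// the expected calling sequence ties the pieces above together. LiveBlocks,
// ColdBlocks, and TransparentBlocks are placeholders for data the client
// computes from its live ranges:
//
//   BitVector RegBundles;
//   SpillPlacer->prepare(RegBundles);             // clear per-query state
//   SpillPlacer->addConstraints(LiveBlocks);      // biases from constrained blocks
//   SpillPlacer->addPrefSpill(ColdBlocks, false); // optional extra negative bias
//   SpillPlacer->addLinks(TransparentBlocks);     // links from live-through blocks
//   SpillPlacer->scanActiveBundles();             // seed Linked/RecentPositive
//   SpillPlacer->iterate();                       // settle the Hopfield network
//   bool Perfect = SpillPlacer->finish();         // bits left set in RegBundles
//                                                 // mean "keep it in a register"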