//===-- LoopUnroll.cpp - Loop unroller pass -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements a simple loop unroller. It works best when loops have
// been canonicalized by the -indvars pass, allowing it to determine the trip
// counts of loops easily.
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/LoopUnrollPass.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopPass.h"
#include "llvm/Analysis/LoopUnrollAnalyzer.h"
#include "llvm/Analysis/OptimizationDiagnosticInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Scalar/LoopPassManager.h"
#include "llvm/Transforms/Utils/LoopUtils.h"
#include "llvm/Transforms/Utils/UnrollLoop.h"
#include <climits>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "loop-unroll"

static cl::opt<unsigned>
    UnrollThreshold("unroll-threshold", cl::Hidden,
                    cl::desc("The cost threshold for loop unrolling"));

static cl::opt<unsigned> UnrollPartialThreshold(
    "unroll-partial-threshold", cl::Hidden,
    cl::desc("The cost threshold for partial loop unrolling"));

static cl::opt<unsigned> UnrollMaxPercentThresholdBoost(
    "unroll-max-percent-threshold-boost", cl::init(400), cl::Hidden,
    cl::desc("The maximum 'boost' (represented as a percentage >= 100) applied "
             "to the threshold when aggressively unrolling a loop due to the "
             "dynamic cost savings. If completely unrolling a loop will reduce "
             "the total runtime from X to Y, we boost the loop unroll "
             "threshold to DefaultThreshold*std::min(MaxPercentThresholdBoost, "
             "X/Y). This limit avoids excessive code bloat."));

static cl::opt<unsigned> UnrollMaxIterationsCountToAnalyze(
    "unroll-max-iteration-count-to-analyze", cl::init(10), cl::Hidden,
    cl::desc("Don't allow loop unrolling to simulate more than this number of "
65 "iterations when checking full unroll profitability"));
66
Dehao Chend55bc4c2016-05-05 00:54:54 +000067static cl::opt<unsigned> UnrollCount(
68 "unroll-count", cl::Hidden,
69 cl::desc("Use this unroll count for all loops including those with "
70 "unroll_count pragma values, for testing purposes"));
Dan Gohmand78c4002008-05-13 00:00:25 +000071
Dehao Chend55bc4c2016-05-05 00:54:54 +000072static cl::opt<unsigned> UnrollMaxCount(
73 "unroll-max-count", cl::Hidden,
74 cl::desc("Set the max unroll count for partial and runtime unrolling, for"
75 "testing purposes"));
Fiona Glaser045afc42016-04-06 16:57:25 +000076
Dehao Chend55bc4c2016-05-05 00:54:54 +000077static cl::opt<unsigned> UnrollFullMaxCount(
78 "unroll-full-max-count", cl::Hidden,
79 cl::desc(
80 "Set the max unroll count for full unrolling, for testing purposes"));
Fiona Glaser045afc42016-04-06 16:57:25 +000081
Matthijs Kooijman98b5c162008-07-29 13:21:23 +000082static cl::opt<bool>
Dehao Chend55bc4c2016-05-05 00:54:54 +000083 UnrollAllowPartial("unroll-allow-partial", cl::Hidden,
84 cl::desc("Allows loops to be partially unrolled until "
85 "-unroll-threshold loop size is reached."));
Matthijs Kooijman98b5c162008-07-29 13:21:23 +000086
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +000087static cl::opt<bool> UnrollAllowRemainder(
88 "unroll-allow-remainder", cl::Hidden,
89 cl::desc("Allow generation of a loop remainder (extra iterations) "
90 "when unrolling a loop."));
91
Andrew Trickd04d15292011-12-09 06:19:40 +000092static cl::opt<bool>
Dehao Chend55bc4c2016-05-05 00:54:54 +000093 UnrollRuntime("unroll-runtime", cl::ZeroOrMore, cl::Hidden,
94 cl::desc("Unroll loops with run-time trip counts"));
Andrew Trickd04d15292011-12-09 06:19:40 +000095
Haicheng Wu1ef17e92016-10-12 21:29:38 +000096static cl::opt<unsigned> UnrollMaxUpperBound(
97 "unroll-max-upperbound", cl::init(8), cl::Hidden,
98 cl::desc(
99 "The max of trip count upper bound that is considered in unrolling"));
100
Dehao Chend55bc4c2016-05-05 00:54:54 +0000101static cl::opt<unsigned> PragmaUnrollThreshold(
102 "pragma-unroll-threshold", cl::init(16 * 1024), cl::Hidden,
103 cl::desc("Unrolled size limit for loops with an unroll(full) or "
104 "unroll_count pragma."));
Justin Bognera1dd4932016-01-12 00:55:26 +0000105
Dehao Chen41d72a82016-11-17 01:17:02 +0000106static cl::opt<unsigned> FlatLoopTripCountThreshold(
107 "flat-loop-tripcount-threshold", cl::init(5), cl::Hidden,
108 cl::desc("If the runtime tripcount for the loop is lower than the "
109 "threshold, the loop is considered as flat and will be less "
110 "aggressively unrolled."));
111
Michael Kupersteinb151a642016-11-30 21:13:57 +0000112static cl::opt<bool>
113 UnrollAllowPeeling("unroll-allow-peeling", cl::Hidden,
114 cl::desc("Allows loops to be peeled when the dynamic "
115 "trip count is known to be low."));
116
// This option isn't ever intended to be enabled; it serves to allow
// experiments to check the assumptions about when this kind of revisit is
// necessary.
static cl::opt<bool> UnrollRevisitChildLoops(
    "unroll-revisit-child-loops", cl::Hidden,
    cl::desc("Enqueue and re-visit child loops in the loop PM after unrolling. "
             "This shouldn't typically be needed as child loops (or their "
             "clones) were already visited."));

/// A magic value for use with the Threshold parameter to indicate
/// that the loop unroll should be performed regardless of how much
/// code expansion would result.
static const unsigned NoThreshold = UINT_MAX;

/// Gather the various unrolling parameters based on the defaults, compiler
/// flags, TTI overrides and user specified parameters.
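///
/// The sources are applied in increasing order of priority: the hard-coded
/// defaults, then TTI overrides, then the function's size attributes, then
/// cl::opt flags, and finally the user-specified arguments to this function.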
static TargetTransformInfo::UnrollingPreferences gatherUnrollingPreferences(
    Loop *L, const TargetTransformInfo &TTI, Optional<unsigned> UserThreshold,
    Optional<unsigned> UserCount, Optional<bool> UserAllowPartial,
    Optional<bool> UserRuntime, Optional<bool> UserUpperBound) {
  TargetTransformInfo::UnrollingPreferences UP;

  // Set up the defaults
  UP.Threshold = 150;
  UP.MaxPercentThresholdBoost = 400;
  UP.OptSizeThreshold = 0;
  UP.PartialThreshold = 150;
  UP.PartialOptSizeThreshold = 0;
  UP.Count = 0;
  UP.PeelCount = 0;
  UP.DefaultUnrollRuntimeCount = 8;
  UP.MaxCount = UINT_MAX;
  UP.FullUnrollMaxCount = UINT_MAX;
  UP.BEInsns = 2;
  UP.Partial = false;
  UP.Runtime = false;
  UP.AllowRemainder = true;
  UP.AllowExpensiveTripCount = false;
  UP.Force = false;
  UP.UpperBound = false;
  UP.AllowPeeling = false;

  // Override with any target specific settings
  TTI.getUnrollingPreferences(L, UP);

  // Apply size attributes
  if (L->getHeader()->getParent()->optForSize()) {
    UP.Threshold = UP.OptSizeThreshold;
    UP.PartialThreshold = UP.PartialOptSizeThreshold;
  }

  // Apply any user values specified by cl::opt
  if (UnrollThreshold.getNumOccurrences() > 0)
    UP.Threshold = UnrollThreshold;
  if (UnrollPartialThreshold.getNumOccurrences() > 0)
    UP.PartialThreshold = UnrollPartialThreshold;
  if (UnrollMaxPercentThresholdBoost.getNumOccurrences() > 0)
    UP.MaxPercentThresholdBoost = UnrollMaxPercentThresholdBoost;
  if (UnrollMaxCount.getNumOccurrences() > 0)
    UP.MaxCount = UnrollMaxCount;
  if (UnrollFullMaxCount.getNumOccurrences() > 0)
    UP.FullUnrollMaxCount = UnrollFullMaxCount;
  if (UnrollAllowPartial.getNumOccurrences() > 0)
    UP.Partial = UnrollAllowPartial;
  if (UnrollAllowRemainder.getNumOccurrences() > 0)
    UP.AllowRemainder = UnrollAllowRemainder;
  if (UnrollRuntime.getNumOccurrences() > 0)
    UP.Runtime = UnrollRuntime;
  if (UnrollMaxUpperBound == 0)
    UP.UpperBound = false;
  if (UnrollAllowPeeling.getNumOccurrences() > 0)
    UP.AllowPeeling = UnrollAllowPeeling;

  // Apply user values provided by argument
  if (UserThreshold.hasValue()) {
    UP.Threshold = *UserThreshold;
    UP.PartialThreshold = *UserThreshold;
  }
  if (UserCount.hasValue())
    UP.Count = *UserCount;
  if (UserAllowPartial.hasValue())
    UP.Partial = *UserAllowPartial;
  if (UserRuntime.hasValue())
    UP.Runtime = *UserRuntime;
  if (UserUpperBound.hasValue())
    UP.UpperBound = *UserUpperBound;

  return UP;
}

namespace {
/// A struct to densely store the state of an instruction after unrolling at
/// each iteration.
///
/// This is designed to work like a tuple of <Instruction *, int> for the
/// purposes of hashing and lookup, but to be able to associate two boolean
/// states with each key.
struct UnrolledInstState {
  Instruction *I;
  int Iteration : 30;
  unsigned IsFree : 1;
  unsigned IsCounted : 1;
};

/// Hashing and equality testing for a set of the instruction states.
struct UnrolledInstStateKeyInfo {
  typedef DenseMapInfo<Instruction *> PtrInfo;
  typedef DenseMapInfo<std::pair<Instruction *, int>> PairInfo;
  static inline UnrolledInstState getEmptyKey() {
    return {PtrInfo::getEmptyKey(), 0, 0, 0};
  }
  static inline UnrolledInstState getTombstoneKey() {
    return {PtrInfo::getTombstoneKey(), 0, 0, 0};
  }
  static inline unsigned getHashValue(const UnrolledInstState &S) {
    return PairInfo::getHashValue({S.I, S.Iteration});
  }
  static inline bool isEqual(const UnrolledInstState &LHS,
                             const UnrolledInstState &RHS) {
    return PairInfo::isEqual({LHS.I, LHS.Iteration}, {RHS.I, RHS.Iteration});
  }
};
}

namespace {
struct EstimatedUnrollCost {
  /// \brief The estimated cost after unrolling.
  unsigned UnrolledCost;

  /// \brief The estimated dynamic cost of executing the instructions in the
  /// rolled form.
  unsigned RolledDynamicCost;
};
}

/// \brief Figure out if the loop is worth full unrolling.
///
/// Complete loop unrolling can make some loads constant, and we need to know
/// if that would expose any further optimization opportunities. This routine
/// estimates this optimization. It computes cost of unrolled loop
/// (UnrolledCost) and dynamic cost of the original loop (RolledDynamicCost). By
/// dynamic cost we mean that we won't count costs of blocks that are known not
/// to be executed (i.e. if we have a branch in the loop and we know that at the
/// given iteration its condition would be resolved to true, we won't add up the
/// cost of the 'false'-block).
/// \returns Optional value, holding the RolledDynamicCost and UnrolledCost. If
/// the analysis failed (no benefits expected from the unrolling, or the loop is
/// too big to analyze), the returned value is None.
static Optional<EstimatedUnrollCost>
analyzeLoopUnrollCost(const Loop *L, unsigned TripCount, DominatorTree &DT,
                      ScalarEvolution &SE, const TargetTransformInfo &TTI,
                      unsigned MaxUnrolledLoopSize) {
  // We want to be able to scale offsets by the trip count and add more offsets
  // to them without checking for overflows, and we already don't want to
  // analyze *massive* trip counts, so we force the max to be reasonably small.
  assert(UnrollMaxIterationsCountToAnalyze < (INT_MAX / 2) &&
         "The unroll iterations max is too large!");

  // Only analyze inner loops. We can't properly estimate cost of nested loops
  // and we won't visit inner loops again anyway.
  if (!L->empty())
    return None;

  // Don't simulate loops with a big or unknown tripcount
  if (!UnrollMaxIterationsCountToAnalyze || !TripCount ||
      TripCount > UnrollMaxIterationsCountToAnalyze)
    return None;

  SmallSetVector<BasicBlock *, 16> BBWorklist;
  SmallSetVector<std::pair<BasicBlock *, BasicBlock *>, 4> ExitWorklist;
  DenseMap<Value *, Constant *> SimplifiedValues;
  SmallVector<std::pair<Value *, Constant *>, 4> SimplifiedInputValues;

  // The estimated cost of the unrolled form of the loop. We try to estimate
  // this by simplifying as much as we can while computing the estimate.
  unsigned UnrolledCost = 0;

  // We also track the estimated dynamic (that is, actually executed) cost in
  // the rolled form. This helps identify cases when the savings from unrolling
  // aren't just exposing dead control flows, but actual reduced dynamic
  // instructions due to the simplifications which we expect to occur after
  // unrolling.
  unsigned RolledDynamicCost = 0;

  // We track the simplification of each instruction in each iteration. We use
  // this to recursively merge costs into the unrolled cost on-demand so that
  // we don't count the cost of any dead code. This is essentially a map from
  // <instruction, int> to <bool, bool>, but stored as a densely packed struct.
  DenseSet<UnrolledInstState, UnrolledInstStateKeyInfo> InstCostMap;

  // A small worklist used to accumulate cost of instructions from each
  // observable and reached root in the loop.
  SmallVector<Instruction *, 16> CostWorklist;

  // PHI-used worklist used between iterations while accumulating cost.
  SmallVector<Instruction *, 4> PHIUsedList;

  // Helper function to accumulate cost for instructions in the loop.
  auto AddCostRecursively = [&](Instruction &RootI, int Iteration) {
    assert(Iteration >= 0 && "Cannot have a negative iteration!");
    assert(CostWorklist.empty() && "Must start with an empty cost list");
    assert(PHIUsedList.empty() && "Must start with an empty phi used list");
    CostWorklist.push_back(&RootI);
    for (;; --Iteration) {
      do {
        Instruction *I = CostWorklist.pop_back_val();

        // InstCostMap only uses I and Iteration as a key, the other two values
        // don't matter here.
        auto CostIter = InstCostMap.find({I, Iteration, 0, 0});
        if (CostIter == InstCostMap.end())
          // If an input to a PHI node comes from a dead path through the loop
          // we may have no cost data for it here. What that actually means is
          // that it is free.
          continue;
        auto &Cost = *CostIter;
        if (Cost.IsCounted)
          // Already counted this instruction.
          continue;

        // Mark that we are counting the cost of this instruction now.
        Cost.IsCounted = true;

        // If this is a PHI node in the loop header, just add it to the PHI set.
        if (auto *PhiI = dyn_cast<PHINode>(I))
          if (PhiI->getParent() == L->getHeader()) {
            assert(Cost.IsFree && "Loop PHIs shouldn't be evaluated as they "
                                  "inherently simplify during unrolling.");
            if (Iteration == 0)
              continue;

            // Push the incoming value from the backedge into the PHI used list
            // if it is an in-loop instruction. We'll use this to populate the
            // cost worklist for the next iteration (as we count backwards).
            if (auto *OpI = dyn_cast<Instruction>(
                    PhiI->getIncomingValueForBlock(L->getLoopLatch())))
              if (L->contains(OpI))
                PHIUsedList.push_back(OpI);
            continue;
          }

        // First accumulate the cost of this instruction.
        if (!Cost.IsFree) {
          UnrolledCost += TTI.getUserCost(I);
          DEBUG(dbgs() << "Adding cost of instruction (iteration " << Iteration
                       << "): ");
          DEBUG(I->dump());
        }

        // We must count the cost of every operand which is not free,
        // recursively. If we reach a loop PHI node, simply add it to the set
        // to be considered on the next iteration (backwards!).
        for (Value *Op : I->operands()) {
          // Check whether this operand is free due to being a constant or
          // outside the loop.
          auto *OpI = dyn_cast<Instruction>(Op);
          if (!OpI || !L->contains(OpI))
            continue;

          // Otherwise accumulate its cost.
          CostWorklist.push_back(OpI);
        }
      } while (!CostWorklist.empty());

      if (PHIUsedList.empty())
        // We've exhausted the search.
        break;

      assert(Iteration > 0 &&
             "Cannot track PHI-used values past the first iteration!");
      CostWorklist.append(PHIUsedList.begin(), PHIUsedList.end());
      PHIUsedList.clear();
    }
  };

  // Ensure that we don't violate the loop structure invariants relied on by
  // this analysis.
  assert(L->isLoopSimplifyForm() && "Must put loop into normal form first.");
  assert(L->isLCSSAForm(DT) &&
         "Must have loops in LCSSA form to track live-out values.");

  DEBUG(dbgs() << "Starting LoopUnroll profitability analysis...\n");

  // Simulate execution of each iteration of the loop counting instructions,
  // which would be simplified.
  // Since the same load will take different values on different iterations,
  // we literally have to go through all loop's iterations.
  for (unsigned Iteration = 0; Iteration < TripCount; ++Iteration) {
    DEBUG(dbgs() << " Analyzing iteration " << Iteration << "\n");

    // Prepare for the iteration by collecting any simplified entry or backedge
    // inputs.
    for (Instruction &I : *L->getHeader()) {
      auto *PHI = dyn_cast<PHINode>(&I);
      if (!PHI)
        break;

      // The loop header PHI nodes must have exactly two inputs: one from the
      // loop preheader and one from the loop latch.
      assert(
          PHI->getNumIncomingValues() == 2 &&
          "Must have an incoming value only for the preheader and the latch.");

      Value *V = PHI->getIncomingValueForBlock(
          Iteration == 0 ? L->getLoopPreheader() : L->getLoopLatch());
      Constant *C = dyn_cast<Constant>(V);
      if (Iteration != 0 && !C)
        C = SimplifiedValues.lookup(V);
      if (C)
        SimplifiedInputValues.push_back({PHI, C});
    }

    // Now clear and re-populate the map for the next iteration.
    SimplifiedValues.clear();
    while (!SimplifiedInputValues.empty())
      SimplifiedValues.insert(SimplifiedInputValues.pop_back_val());

    UnrolledInstAnalyzer Analyzer(Iteration, SimplifiedValues, SE, L);

    BBWorklist.clear();
    BBWorklist.insert(L->getHeader());
    // Note that we *must not* cache the size, this loop grows the worklist.
    for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
      BasicBlock *BB = BBWorklist[Idx];

      // Visit all instructions in the given basic block and try to simplify
      // it. We don't change the actual IR, just count optimization
      // opportunities.
      for (Instruction &I : *BB) {
        if (isa<DbgInfoIntrinsic>(I))
          continue;

        // Track this instruction's expected baseline cost when executing the
        // rolled loop form.
        RolledDynamicCost += TTI.getUserCost(&I);

        // Visit the instruction to analyze its loop cost after unrolling,
        // and if the visitor returns true, mark the instruction as free after
        // unrolling and continue.
        bool IsFree = Analyzer.visit(I);
        bool Inserted = InstCostMap.insert({&I, (int)Iteration,
                                           (unsigned)IsFree,
                                           /*IsCounted*/ false}).second;
        (void)Inserted;
        assert(Inserted && "Cannot have a state for an unvisited instruction!");

        if (IsFree)
          continue;

        // Can't properly model a cost of a call.
        // FIXME: With a proper cost model we should be able to do it.
        if (isa<CallInst>(&I))
          return None;

        // If the instruction might have a side-effect recursively account for
        // the cost of it and all the instructions leading up to it.
        if (I.mayHaveSideEffects())
          AddCostRecursively(I, Iteration);

        // If unrolled body turns out to be too big, bail out.
        if (UnrolledCost > MaxUnrolledLoopSize) {
          DEBUG(dbgs() << " Exceeded threshold.. exiting.\n"
                       << " UnrolledCost: " << UnrolledCost
                       << ", MaxUnrolledLoopSize: " << MaxUnrolledLoopSize
                       << "\n");
          return None;
        }
      }

      TerminatorInst *TI = BB->getTerminator();

      // Add in the live successors by first checking whether we have a terminator
      // that may be simplified based on the values simplified by this call.
      BasicBlock *KnownSucc = nullptr;
      if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
        if (BI->isConditional()) {
          if (Constant *SimpleCond =
                  SimplifiedValues.lookup(BI->getCondition())) {
            // Just take the first successor if condition is undef
            if (isa<UndefValue>(SimpleCond))
              KnownSucc = BI->getSuccessor(0);
            else if (ConstantInt *SimpleCondVal =
                         dyn_cast<ConstantInt>(SimpleCond))
              KnownSucc = BI->getSuccessor(SimpleCondVal->isZero() ? 1 : 0);
          }
        }
      } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
        if (Constant *SimpleCond =
                SimplifiedValues.lookup(SI->getCondition())) {
          // Just take the first successor if condition is undef
          if (isa<UndefValue>(SimpleCond))
            KnownSucc = SI->getSuccessor(0);
          else if (ConstantInt *SimpleCondVal =
                       dyn_cast<ConstantInt>(SimpleCond))
            KnownSucc = SI->findCaseValue(SimpleCondVal).getCaseSuccessor();
        }
      }
      if (KnownSucc) {
        if (L->contains(KnownSucc))
          BBWorklist.insert(KnownSucc);
        else
          ExitWorklist.insert({BB, KnownSucc});
        continue;
      }

      // Add BB's successors to the worklist.
      for (BasicBlock *Succ : successors(BB))
        if (L->contains(Succ))
          BBWorklist.insert(Succ);
        else
          ExitWorklist.insert({BB, Succ});
      AddCostRecursively(*TI, Iteration);
    }

    // If we found no optimization opportunities on the first iteration, we
    // won't find them on later ones too.
    if (UnrolledCost == RolledDynamicCost) {
      DEBUG(dbgs() << " No opportunities found.. exiting.\n"
                   << " UnrolledCost: " << UnrolledCost << "\n");
      return None;
    }
  }

  while (!ExitWorklist.empty()) {
    BasicBlock *ExitingBB, *ExitBB;
    std::tie(ExitingBB, ExitBB) = ExitWorklist.pop_back_val();

    for (Instruction &I : *ExitBB) {
      auto *PN = dyn_cast<PHINode>(&I);
      if (!PN)
        break;

      Value *Op = PN->getIncomingValueForBlock(ExitingBB);
      if (auto *OpI = dyn_cast<Instruction>(Op))
        if (L->contains(OpI))
          AddCostRecursively(*OpI, TripCount - 1);
    }
  }

  DEBUG(dbgs() << "Analysis finished:\n"
               << "UnrolledCost: " << UnrolledCost << ", "
               << "RolledDynamicCost: " << RolledDynamicCost << "\n");
  return {{UnrolledCost, RolledDynamicCost}};
}

/// ApproximateLoopSize - Approximate the size of the loop.
static unsigned ApproximateLoopSize(const Loop *L, unsigned &NumCalls,
                                    bool &NotDuplicatable, bool &Convergent,
                                    const TargetTransformInfo &TTI,
                                    AssumptionCache *AC, unsigned BEInsns) {
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(L, AC, EphValues);

  CodeMetrics Metrics;
  for (BasicBlock *BB : L->blocks())
    Metrics.analyzeBasicBlock(BB, TTI, EphValues);
  NumCalls = Metrics.NumInlineCandidates;
  NotDuplicatable = Metrics.notDuplicatable;
  Convergent = Metrics.convergent;

  unsigned LoopSize = Metrics.NumInsts;

  // Don't allow an estimate of size zero. This would allow unrolling of loops
  // with huge iteration counts, which is a compile time problem even if it's
  // not a problem for code quality. Also, the code using this size may assume
  // that each loop has at least three instructions (likely a conditional
  // branch, a comparison feeding that branch, and some kind of loop increment
  // feeding that comparison instruction).
  LoopSize = std::max(LoopSize, BEInsns + 1);

  return LoopSize;
}

// Returns the loop hint metadata node with the given name (for example,
// "llvm.loop.unroll.count"). If no such metadata node exists, then nullptr is
// returned.
static MDNode *GetUnrollMetadataForLoop(const Loop *L, StringRef Name) {
  if (MDNode *LoopID = L->getLoopID())
    return GetUnrollMetadata(LoopID, Name);
  return nullptr;
}

// Returns true if the loop has an unroll(full) pragma.
static bool HasUnrollFullPragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.full");
}

// Returns true if the loop has an unroll(enable) pragma. This metadata is used
// for both "#pragma unroll" and "#pragma clang loop unroll(enable)" directives.
static bool HasUnrollEnablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.enable");
}

// Returns true if the loop has an unroll(disable) pragma.
static bool HasUnrollDisablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.disable");
}

// Returns true if the loop has a runtime unroll(disable) pragma.
static bool HasRuntimeUnrollDisablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.runtime.disable");
}

// If loop has an unroll_count pragma return the (necessarily
// positive) value from the pragma. Otherwise return 0.
static unsigned UnrollCountPragmaValue(const Loop *L) {
  MDNode *MD = GetUnrollMetadataForLoop(L, "llvm.loop.unroll.count");
  if (MD) {
    assert(MD->getNumOperands() == 2 &&
           "Unroll count hint metadata should have two operands.");
    unsigned Count =
        mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue();
    assert(Count >= 1 && "Unroll count must be positive.");
    return Count;
  }
  return 0;
}

// Remove existing unroll metadata and add unroll disable metadata to
// indicate the loop has already been unrolled. This prevents a loop
// from being unrolled more than is directed by a pragma if the loop
// unrolling pass is run more than once (which it generally is).
static void SetLoopAlreadyUnrolled(Loop *L) {
  MDNode *LoopID = L->getLoopID();
  // First remove any existing loop unrolling metadata.
  SmallVector<Metadata *, 4> MDs;
  // Reserve first location for self reference to the LoopID metadata node.
  MDs.push_back(nullptr);

  if (LoopID) {
    for (unsigned i = 1, ie = LoopID->getNumOperands(); i < ie; ++i) {
      bool IsUnrollMetadata = false;
      MDNode *MD = dyn_cast<MDNode>(LoopID->getOperand(i));
      if (MD) {
        const MDString *S = dyn_cast<MDString>(MD->getOperand(0));
        IsUnrollMetadata = S && S->getString().startswith("llvm.loop.unroll.");
      }
      if (!IsUnrollMetadata)
        MDs.push_back(LoopID->getOperand(i));
    }
  }

  // Add unroll(disable) metadata to disable future unrolling.
  LLVMContext &Context = L->getHeader()->getContext();
  SmallVector<Metadata *, 1> DisableOperands;
  DisableOperands.push_back(MDString::get(Context, "llvm.loop.unroll.disable"));
  MDNode *DisableNode = MDNode::get(Context, DisableOperands);
  MDs.push_back(DisableNode);

  MDNode *NewLoopID = MDNode::get(Context, MDs);
  // Set operand 0 to refer to the loop id itself.
  NewLoopID->replaceOperandWith(0, NewLoopID);
  L->setLoopID(NewLoopID);
}

// Computes the boosting factor for complete unrolling.
// If fully unrolling the loop would save a lot of RolledDynamicCost, it would
// be beneficial to fully unroll the loop even if UnrolledCost is large. We
// use (RolledDynamicCost / UnrolledCost) to model the unroll benefits to adjust
// the unroll threshold.
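// For example, with RolledDynamicCost = 450 and UnrolledCost = 150 the boost
// is min(100 * 450 / 150, MaxPercentThresholdBoost) = 300 under the default
// limit of 400, i.e. the effective full-unroll threshold is tripled.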
static unsigned getFullUnrollBoostingFactor(const EstimatedUnrollCost &Cost,
                                            unsigned MaxPercentThresholdBoost) {
  if (Cost.RolledDynamicCost >= UINT_MAX / 100)
    return 100;
  else if (Cost.UnrolledCost != 0)
    // The boosting factor is RolledDynamicCost / UnrolledCost
    return std::min(100 * Cost.RolledDynamicCost / Cost.UnrolledCost,
                    MaxPercentThresholdBoost);
  else
    return MaxPercentThresholdBoost;
}

// Returns loop size estimation for unrolled loop.
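// For example, a loop body of 10 instructions with BEInsns = 2 unrolled with a
// count of 4 is estimated as (10 - 2) * 4 + 2 = 34 instructions, because the
// backedge instructions are not replicated for every unrolled iteration.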
static uint64_t getUnrolledLoopSize(
    unsigned LoopSize,
    TargetTransformInfo::UnrollingPreferences &UP) {
  assert(LoopSize >= UP.BEInsns && "LoopSize should not be less than BEInsns!");
  return (uint64_t)(LoopSize - UP.BEInsns) * UP.Count + UP.BEInsns;
}

// Returns true if unroll count was set explicitly.
// Calculates unroll count and writes it to UP.Count.
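// The priorities, from highest to lowest, are: an explicit -unroll-count
// option, an unroll_count pragma, full unrolling, partial unrolling, loop
// peeling, and finally runtime unrolling.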
static bool computeUnrollCount(
    Loop *L, const TargetTransformInfo &TTI, DominatorTree &DT, LoopInfo *LI,
    ScalarEvolution *SE, OptimizationRemarkEmitter *ORE, unsigned &TripCount,
    unsigned MaxTripCount, unsigned &TripMultiple, unsigned LoopSize,
    TargetTransformInfo::UnrollingPreferences &UP, bool &UseUpperBound) {
  // Check for explicit Count.
  // 1st priority is unroll count set by "unroll-count" option.
  bool UserUnrollCount = UnrollCount.getNumOccurrences() > 0;
  if (UserUnrollCount) {
    UP.Count = UnrollCount;
    UP.AllowExpensiveTripCount = true;
    UP.Force = true;
    if (UP.AllowRemainder && getUnrolledLoopSize(LoopSize, UP) < UP.Threshold)
      return true;
  }

  // 2nd priority is unroll count set by pragma.
  unsigned PragmaCount = UnrollCountPragmaValue(L);
  if (PragmaCount > 0) {
    UP.Count = PragmaCount;
    UP.Runtime = true;
    UP.AllowExpensiveTripCount = true;
    UP.Force = true;
    if (UP.AllowRemainder &&
        getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold)
      return true;
  }
  bool PragmaFullUnroll = HasUnrollFullPragma(L);
  if (PragmaFullUnroll && TripCount != 0) {
    UP.Count = TripCount;
    if (getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold)
      return false;
  }

  bool PragmaEnableUnroll = HasUnrollEnablePragma(L);
  bool ExplicitUnroll = PragmaCount > 0 || PragmaFullUnroll ||
                        PragmaEnableUnroll || UserUnrollCount;

  if (ExplicitUnroll && TripCount != 0) {
    // If the loop has an unrolling pragma, we want to be more aggressive with
    // unrolling limits. Set thresholds to at least the PragmaThreshold value
    // which is larger than the default limits.
    UP.Threshold = std::max<unsigned>(UP.Threshold, PragmaUnrollThreshold);
    UP.PartialThreshold =
        std::max<unsigned>(UP.PartialThreshold, PragmaUnrollThreshold);
  }

  // 3rd priority is full unroll count.
  // Full unroll makes sense only when TripCount or its upper bound could be
  // statically calculated.
  // Also we need to check if we exceed FullUnrollMaxCount.
  // If using the upper bound to unroll, TripMultiple should be set to 1 because
  // we do not know when loop may exit.
  // MaxTripCount and ExactTripCount cannot both be non zero since we only
  // compute the former when the latter is zero.
  unsigned ExactTripCount = TripCount;
  assert((ExactTripCount == 0 || MaxTripCount == 0) &&
756 "ExtractTripCound and MaxTripCount cannot both be non zero.");
  unsigned FullUnrollTripCount = ExactTripCount ? ExactTripCount : MaxTripCount;
  UP.Count = FullUnrollTripCount;
  if (FullUnrollTripCount && FullUnrollTripCount <= UP.FullUnrollMaxCount) {
    // When computing the unrolled size, note that BEInsns are not replicated
    // like the rest of the loop body.
    if (getUnrolledLoopSize(LoopSize, UP) < UP.Threshold) {
      UseUpperBound = (MaxTripCount == FullUnrollTripCount);
      TripCount = FullUnrollTripCount;
      TripMultiple = UP.UpperBound ? 1 : TripMultiple;
      return ExplicitUnroll;
    } else {
      // The loop isn't that small, but we still can fully unroll it if that
      // helps to remove a significant number of instructions.
      // To check that, run additional analysis on the loop.
      if (Optional<EstimatedUnrollCost> Cost = analyzeLoopUnrollCost(
              L, FullUnrollTripCount, DT, *SE, TTI,
              UP.Threshold * UP.MaxPercentThresholdBoost / 100)) {
        unsigned Boost =
            getFullUnrollBoostingFactor(*Cost, UP.MaxPercentThresholdBoost);
        if (Cost->UnrolledCost < UP.Threshold * Boost / 100) {
          UseUpperBound = (MaxTripCount == FullUnrollTripCount);
          TripCount = FullUnrollTripCount;
          TripMultiple = UP.UpperBound ? 1 : TripMultiple;
          return ExplicitUnroll;
        }
      }
    }
  }

  // 4th priority is partial unrolling.
  // Try partial unroll only when TripCount could be statically calculated.
  if (TripCount) {
    UP.Partial |= ExplicitUnroll;
    if (!UP.Partial) {
      DEBUG(dbgs() << " will not try to unroll partially because "
                   << "-unroll-allow-partial not given\n");
      UP.Count = 0;
      return false;
    }
    if (UP.Count == 0)
      UP.Count = TripCount;
    if (UP.PartialThreshold != NoThreshold) {
      // Reduce unroll count to be modulo of TripCount for partial unrolling.
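      // (getUnrolledLoopSize(LoopSize, UP) <= UP.PartialThreshold rearranges to
      // UP.Count <= (UP.PartialThreshold - UP.BEInsns) / (LoopSize - UP.BEInsns),
      // which is the bound computed below, clamped so it cannot underflow.)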
      if (getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold)
        UP.Count =
            (std::max(UP.PartialThreshold, UP.BEInsns + 1) - UP.BEInsns) /
            (LoopSize - UP.BEInsns);
      if (UP.Count > UP.MaxCount)
        UP.Count = UP.MaxCount;
      while (UP.Count != 0 && TripCount % UP.Count != 0)
        UP.Count--;
      if (UP.AllowRemainder && UP.Count <= 1) {
        // If there is no Count that is modulo of TripCount, set Count to
        // largest power-of-two factor that satisfies the threshold limit.
        // Since we will create a fixup loop, do this type of unrolling only if
        // a remainder loop is allowed.
        UP.Count = UP.DefaultUnrollRuntimeCount;
        while (UP.Count != 0 &&
               getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold)
          UP.Count >>= 1;
      }
      if (UP.Count < 2) {
        if (PragmaEnableUnroll)
          ORE->emit(
              OptimizationRemarkMissed(DEBUG_TYPE, "UnrollAsDirectedTooLarge",
                                       L->getStartLoc(), L->getHeader())
              << "Unable to unroll loop as directed by unroll(enable) pragma "
                 "because unrolled size is too large.");
        UP.Count = 0;
      }
    } else {
      UP.Count = TripCount;
    }
    if ((PragmaFullUnroll || PragmaEnableUnroll) && TripCount &&
        UP.Count != TripCount)
      ORE->emit(
          OptimizationRemarkMissed(DEBUG_TYPE, "FullUnrollAsDirectedTooLarge",
                                   L->getStartLoc(), L->getHeader())
          << "Unable to fully unroll loop as directed by unroll pragma because "
             "unrolled size is too large.");
    return ExplicitUnroll;
  }
  assert(TripCount == 0 &&
         "All cases when TripCount is constant should be covered here.");
  if (PragmaFullUnroll)
    ORE->emit(
        OptimizationRemarkMissed(DEBUG_TYPE,
                                 "CantFullUnrollAsDirectedRuntimeTripCount",
                                 L->getStartLoc(), L->getHeader())
        << "Unable to fully unroll loop as directed by unroll(full) pragma "
           "because loop has a runtime trip count.");

  // 5th priority is loop peeling.
  computePeelCount(L, LoopSize, UP);
  if (UP.PeelCount) {
    UP.Runtime = false;
    UP.Count = 1;
    return ExplicitUnroll;
  }

  // 6th priority is runtime unrolling.
  // Don't unroll a runtime trip count loop when it is disabled.
  if (HasRuntimeUnrollDisablePragma(L)) {
    UP.Count = 0;
    return false;
  }

  // Check if the runtime trip count is too small when profile is available.
  if (L->getHeader()->getParent()->getEntryCount()) {
    if (auto ProfileTripCount = getLoopEstimatedTripCount(L)) {
      if (*ProfileTripCount < FlatLoopTripCountThreshold)
        return false;
      else
        UP.AllowExpensiveTripCount = true;
    }
  }

  // Reduce count based on the type of unrolling and the threshold values.
  UP.Runtime |= PragmaEnableUnroll || PragmaCount > 0 || UserUnrollCount;
  if (!UP.Runtime) {
    DEBUG(dbgs() << " will not try to unroll loop with runtime trip count "
                 << "because -unroll-runtime not given\n");
    UP.Count = 0;
    return false;
  }
  if (UP.Count == 0)
    UP.Count = UP.DefaultUnrollRuntimeCount;

  // Reduce unroll count to be the largest power-of-two factor of
  // the original count which satisfies the threshold limit.
  while (UP.Count != 0 &&
         getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold)
    UP.Count >>= 1;

#ifndef NDEBUG
  unsigned OrigCount = UP.Count;
#endif

  if (!UP.AllowRemainder && UP.Count != 0 && (TripMultiple % UP.Count) != 0) {
    while (UP.Count != 0 && TripMultiple % UP.Count != 0)
      UP.Count >>= 1;
    DEBUG(dbgs() << "Remainder loop is restricted (that could be architecture "
899 "specific or because the loop contains a convergent "
900 "instruction), so unroll count must divide the trip "
901 "multiple, "
902 << TripMultiple << ". Reducing unroll count from "
903 << OrigCount << " to " << UP.Count << ".\n");
Adam Nemetf57cc622016-09-30 03:44:16 +0000904 using namespace ore;
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000905 if (PragmaCount > 0 && !UP.AllowRemainder)
Adam Nemetf57cc622016-09-30 03:44:16 +0000906 ORE->emit(
907 OptimizationRemarkMissed(DEBUG_TYPE,
908 "DifferentUnrollCountFromDirected",
909 L->getStartLoc(), L->getHeader())
910 << "Unable to unroll loop the number of times directed by "
911 "unroll_count pragma because remainder loop is restricted "
912 "(that could architecture specific or because the loop "
913 "contains a convergent instruction) and so must have an unroll "
914 "count that divides the loop trip multiple of "
915 << NV("TripMultiple", TripMultiple) << ". Unrolling instead "
916 << NV("UnrollCount", UP.Count) << " time(s).");
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000917 }
918
919 if (UP.Count > UP.MaxCount)
920 UP.Count = UP.MaxCount;
921 DEBUG(dbgs() << " partially unrolling with count: " << UP.Count << "\n");
922 if (UP.Count < 2)
923 UP.Count = 0;
924 return ExplicitUnroll;
925}
926
Justin Bognerb8d82ab2016-01-12 05:21:37 +0000927static bool tryToUnrollLoop(Loop *L, DominatorTree &DT, LoopInfo *LI,
928 ScalarEvolution *SE, const TargetTransformInfo &TTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +0000929 AssumptionCache &AC, OptimizationRemarkEmitter &ORE,
Adam Nemet12937c32016-07-29 19:29:47 +0000930 bool PreserveLCSSA,
Justin Bognerb8d82ab2016-01-12 05:21:37 +0000931 Optional<unsigned> ProvidedCount,
932 Optional<unsigned> ProvidedThreshold,
933 Optional<bool> ProvidedAllowPartial,
Haicheng Wu1ef17e92016-10-12 21:29:38 +0000934 Optional<bool> ProvidedRuntime,
935 Optional<bool> ProvidedUpperBound) {
Evgeny Stupachenkob7875222016-05-28 00:14:58 +0000936 DEBUG(dbgs() << "Loop Unroll: F[" << L->getHeader()->getParent()->getName()
937 << "] Loop %" << L->getHeader()->getName() << "\n");
Haicheng Wu731b04c2016-11-23 19:39:26 +0000938 if (HasUnrollDisablePragma(L))
939 return false;
940 if (!L->isLoopSimplifyForm()) {
941 DEBUG(
942 dbgs() << " Not unrolling loop which is not in loop-simplify form.\n");
Eli Benderskyff903242014-06-16 23:53:02 +0000943 return false;
944 }
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000945
946 unsigned NumInlineCandidates;
947 bool NotDuplicatable;
948 bool Convergent;
Evgeny Stupachenkoc2698cd2016-11-09 19:56:39 +0000949 TargetTransformInfo::UnrollingPreferences UP = gatherUnrollingPreferences(
950 L, TTI, ProvidedThreshold, ProvidedCount, ProvidedAllowPartial,
951 ProvidedRuntime, ProvidedUpperBound);
Haicheng Wu731b04c2016-11-23 19:39:26 +0000952 // Exit early if unrolling is disabled.
953 if (UP.Threshold == 0 && (!UP.Partial || UP.PartialThreshold == 0))
954 return false;
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000955 unsigned LoopSize = ApproximateLoopSize(
Daniel Jasperaec2fa32016-12-19 08:22:17 +0000956 L, NumInlineCandidates, NotDuplicatable, Convergent, TTI, &AC, UP.BEInsns);
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000957 DEBUG(dbgs() << " Loop Size = " << LoopSize << "\n");
958 if (NotDuplicatable) {
959 DEBUG(dbgs() << " Not unrolling loop which contains non-duplicatable"
960 << " instructions.\n");
961 return false;
962 }
963 if (NumInlineCandidates != 0) {
964 DEBUG(dbgs() << " Not unrolling loop with inlinable calls.\n");
965 return false;
966 }
Andrew Trick279e7a62011-07-23 00:29:16 +0000967
Andrew Trick2b6860f2011-08-11 23:36:16 +0000968 // Find trip count and trip multiple if count is not available
969 unsigned TripCount = 0;
Haicheng Wu1ef17e92016-10-12 21:29:38 +0000970 unsigned MaxTripCount = 0;
Andrew Trick1cabe542011-07-23 00:33:05 +0000971 unsigned TripMultiple = 1;
Chandler Carruth6666c272014-10-11 00:12:11 +0000972 // If there are multiple exiting blocks but one of them is the latch, use the
973 // latch for the trip count estimation. Otherwise insist on a single exiting
974 // block for the trip count estimation.
975 BasicBlock *ExitingBlock = L->getLoopLatch();
976 if (!ExitingBlock || !L->isLoopExiting(ExitingBlock))
977 ExitingBlock = L->getExitingBlock();
978 if (ExitingBlock) {
979 TripCount = SE->getSmallConstantTripCount(L, ExitingBlock);
980 TripMultiple = SE->getSmallConstantTripMultiple(L, ExitingBlock);
Andrew Trick2b6860f2011-08-11 23:36:16 +0000981 }
Hal Finkel8f2e7002013-09-11 19:25:43 +0000982
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +0000983 // If the loop contains a convergent operation, the prelude we'd add
984 // to do the first few instructions before we hit the unrolled loop
985 // is unsafe -- it adds a control-flow dependency to the convergent
  // operation. Therefore restrict the remainder loop (try unrolling without it).
  //
  // TODO: This is quite conservative. In practice, convergent_op()
  // is likely to be called unconditionally in the loop. In this
  // case, the program would be ill-formed (on most architectures)
  // unless n were the same on all threads in a thread group.
  // Assuming n is the same on all threads, any kind of unrolling is
  // safe. But currently llvm's notion of convergence isn't powerful
  // enough to express this.
  if (Convergent)
    UP.AllowRemainder = false;

  // Try to find the trip count upper bound if we cannot find the exact trip
  // count.
  bool MaxOrZero = false;
  if (!TripCount) {
    MaxTripCount = SE->getSmallConstantMaxTripCount(L);
    MaxOrZero = SE->isBackedgeTakenCountMaxOrZero(L);
    // We can unroll by the upper bound amount if it's generally allowed or if
    // we know that the loop is executed either the upper bound or zero times.
    // (MaxOrZero unrolling keeps only the first loop test, so the number of
    // loop tests remains the same compared to the non-unrolled version, whereas
    // the generic upper bound unrolling keeps all but the last loop test so the
    // number of loop tests goes up which may end up being worse on targets with
    // constrained branch predictor resources so is controlled by an option.)
1011 // In addition we only unroll small upper bounds.
1012 if (!(UP.UpperBound || MaxOrZero) || MaxTripCount > UnrollMaxUpperBound) {
1013 MaxTripCount = 0;
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001014 }
1015 }
1016
1017 // computeUnrollCount() decides whether it is beneficial to use upper bound to
1018 // fully unroll the loop.
1019 bool UseUpperBound = false;
1020 bool IsCountSetExplicitly =
1021 computeUnrollCount(L, TTI, DT, LI, SE, &ORE, TripCount, MaxTripCount,
1022 TripMultiple, LoopSize, UP, UseUpperBound);
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +00001023 if (!UP.Count)
Eli Benderskyff903242014-06-16 23:53:02 +00001024 return false;
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +00001025 // Unroll factor (Count) must be less or equal to TripCount.
1026 if (TripCount && UP.Count > TripCount)
1027 UP.Count = TripCount;
Dan Gohman2980d9d2007-05-11 20:53:41 +00001028
Dan Gohman3dc2d922008-05-14 00:24:14 +00001029 // Unroll the loop.
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +00001030 if (!UnrollLoop(L, UP.Count, TripCount, UP.Force, UP.Runtime,
John Brawn84b21832016-10-21 11:08:48 +00001031 UP.AllowExpensiveTripCount, UseUpperBound, MaxOrZero,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001032 TripMultiple, UP.PeelCount, LI, SE, &DT, &AC, &ORE,
Michael Kupersteinb151a642016-11-30 21:13:57 +00001033 PreserveLCSSA))
Dan Gohman3dc2d922008-05-14 00:24:14 +00001034 return false;
Dan Gohman2980d9d2007-05-11 20:53:41 +00001035
Evgeny Stupachenkoea2aef42016-05-27 23:15:06 +00001036  // If the loop has an unroll count pragma, or was unrolled by an explicitly
1037  // set count, mark it as unrolled to prevent unrolling beyond that request.
Michael Kupersteinb151a642016-11-30 21:13:57 +00001038 // If the loop was peeled, we already "used up" the profile information
1039 // we had, so we don't want to unroll or peel again.
1040 if (IsCountSetExplicitly || UP.PeelCount)
David L Kreitzer8d441eb2016-03-25 14:24:52 +00001041 SetLoopAlreadyUnrolled(L);
Michael Kupersteinb151a642016-11-30 21:13:57 +00001042
Chris Lattner946b2552004-04-18 05:20:17 +00001043 return true;
1044}
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001045
1046namespace {
1047class LoopUnroll : public LoopPass {
1048public:
1049 static char ID; // Pass ID, replacement for typeid
1050 LoopUnroll(Optional<unsigned> Threshold = None,
1051 Optional<unsigned> Count = None,
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001052 Optional<bool> AllowPartial = None, Optional<bool> Runtime = None,
1053 Optional<bool> UpperBound = None)
Benjamin Kramer82de7d32016-05-27 14:27:24 +00001054 : LoopPass(ID), ProvidedCount(std::move(Count)),
1055 ProvidedThreshold(Threshold), ProvidedAllowPartial(AllowPartial),
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001056 ProvidedRuntime(Runtime), ProvidedUpperBound(UpperBound) {
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001057 initializeLoopUnrollPass(*PassRegistry::getPassRegistry());
1058 }
1059
1060 Optional<unsigned> ProvidedCount;
1061 Optional<unsigned> ProvidedThreshold;
1062 Optional<bool> ProvidedAllowPartial;
1063 Optional<bool> ProvidedRuntime;
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001064 Optional<bool> ProvidedUpperBound;
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001065
1066 bool runOnLoop(Loop *L, LPPassManager &) override {
Andrew Kayloraa641a52016-04-22 22:06:11 +00001067 if (skipLoop(L))
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001068 return false;
1069
1070 Function &F = *L->getHeader()->getParent();
1071
1072 auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
1073 LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
1074 ScalarEvolution *SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
1075 const TargetTransformInfo &TTI =
1076 getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001077 auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
Adam Nemet4f155b62016-08-26 15:58:34 +00001078 // For the old PM, we can't use OptimizationRemarkEmitter as an analysis
1079 // pass. Function analyses need to be preserved across loop transformations
1080 // but ORE cannot be preserved (see comment before the pass definition).
1081 OptimizationRemarkEmitter ORE(&F);
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001082 bool PreserveLCSSA = mustPreserveAnalysisID(LCSSAID);
1083
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001084 return tryToUnrollLoop(L, DT, LI, SE, TTI, AC, ORE, PreserveLCSSA,
Adam Nemet12937c32016-07-29 19:29:47 +00001085 ProvidedCount, ProvidedThreshold,
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001086 ProvidedAllowPartial, ProvidedRuntime,
1087 ProvidedUpperBound);
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001088 }
1089
1090  /// This transformation requires natural loop information and requires that
1091  /// loop preheaders be inserted into the CFG.
1092 ///
1093 void getAnalysisUsage(AnalysisUsage &AU) const override {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001094 AU.addRequired<AssumptionCacheTracker>();
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001095 AU.addRequired<TargetTransformInfoWrapperPass>();
Chandler Carruth31088a92016-02-19 10:45:18 +00001096 // FIXME: Loop passes are required to preserve domtree, and for now we just
1097 // recreate dom info if anything gets unrolled.
1098 getLoopAnalysisUsage(AU);
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001099 }
1100};
1101}
1102
1103char LoopUnroll::ID = 0;
1104INITIALIZE_PASS_BEGIN(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001105INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
Chandler Carruth31088a92016-02-19 10:45:18 +00001106INITIALIZE_PASS_DEPENDENCY(LoopPass)
1107INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001108INITIALIZE_PASS_END(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
1109
1110Pass *llvm::createLoopUnrollPass(int Threshold, int Count, int AllowPartial,
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001111 int Runtime, int UpperBound) {
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001112 // TODO: It would make more sense for this function to take the optionals
1113 // directly, but that's dangerous since it would silently break out of tree
1114 // callers.
1115 return new LoopUnroll(Threshold == -1 ? None : Optional<unsigned>(Threshold),
1116 Count == -1 ? None : Optional<unsigned>(Count),
1117 AllowPartial == -1 ? None
1118 : Optional<bool>(AllowPartial),
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001119 Runtime == -1 ? None : Optional<bool>(Runtime),
1120 UpperBound == -1 ? None : Optional<bool>(UpperBound));
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001121}
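// A hedged usage sketch (the argument values are purely illustrative):
//   createLoopUnrollPass(/*Threshold*/ 150, /*Count*/ -1, /*AllowPartial*/ 1,
//                        /*Runtime*/ -1, /*UpperBound*/ 0)
// pins the threshold at 150, enables partial unrolling, disables upper-bound
// unrolling, and leaves the count and runtime decisions to the defaults,
// since -1 is translated to None for each parameter above.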
1122
1123Pass *llvm::createSimpleLoopUnrollPass() {
Haicheng Wu1ef17e92016-10-12 21:29:38 +00001124 return llvm::createLoopUnrollPass(-1, -1, 0, 0, 0);
Justin Bognerb8d82ab2016-01-12 05:21:37 +00001125}
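// In other words, the "simple" unroller keeps the default threshold and count
// but turns off partial, runtime, and upper-bound unrolling.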
Sean Silvae3c18a52016-07-19 23:54:23 +00001126
Chandler Carruth410eaeb2017-01-11 06:23:21 +00001127PreservedAnalyses LoopUnrollPass::run(Loop &L, LoopAnalysisManager &AM,
1128 LoopStandardAnalysisResults &AR,
Chandler Carruthce40fa12017-01-25 02:49:01 +00001129 LPMUpdater &Updater) {
Sean Silvae3c18a52016-07-19 23:54:23 +00001130 const auto &FAM =
Chandler Carruth410eaeb2017-01-11 06:23:21 +00001131 AM.getResult<FunctionAnalysisManagerLoopProxy>(L, AR).getManager();
Sean Silvae3c18a52016-07-19 23:54:23 +00001132 Function *F = L.getHeader()->getParent();
1133
Adam Nemet12937c32016-07-29 19:29:47 +00001134 auto *ORE = FAM.getCachedResult<OptimizationRemarkEmitterAnalysis>(*F);
Chandler Carruth410eaeb2017-01-11 06:23:21 +00001135 // FIXME: This should probably be optional rather than required.
Adam Nemet12937c32016-07-29 19:29:47 +00001136 if (!ORE)
1137 report_fatal_error("LoopUnrollPass: OptimizationRemarkEmitterAnalysis not "
1138 "cached at a higher level");
Sean Silvae3c18a52016-07-19 23:54:23 +00001139
Chandler Carruthce40fa12017-01-25 02:49:01 +00001140 // Keep track of the previous loop structure so we can identify new loops
1141 // created by unrolling.
1142 Loop *ParentL = L.getParentLoop();
1143 SmallPtrSet<Loop *, 4> OldLoops;
1144 if (ParentL)
1145 OldLoops.insert(ParentL->begin(), ParentL->end());
1146 else
1147 OldLoops.insert(AR.LI.begin(), AR.LI.end());
1148
Chandler Carruth410eaeb2017-01-11 06:23:21 +00001149 bool Changed = tryToUnrollLoop(&L, AR.DT, &AR.LI, &AR.SE, AR.TTI, AR.AC, *ORE,
1150 /*PreserveLCSSA*/ true, ProvidedCount,
1151 ProvidedThreshold, ProvidedAllowPartial,
1152 ProvidedRuntime, ProvidedUpperBound);
Sean Silvae3c18a52016-07-19 23:54:23 +00001153 if (!Changed)
1154 return PreservedAnalyses::all();
Chandler Carruthca68a3e2017-01-15 06:32:49 +00001155
Chandler Carruthce40fa12017-01-25 02:49:01 +00001156 // The parent must not be damaged by unrolling!
1157#ifndef NDEBUG
1158 if (ParentL)
1159 ParentL->verifyLoop();
1160#endif
1161
1162 // Unrolling can do several things to introduce new loops into a loop nest:
1163 // - Partial unrolling clones child loops within the current loop.
1164 // - Full unrolling clones child loops within the current loop but then
1165 // removes the current loop making all of the children appear to be new
1166 // sibling loops.
1167 // - Loop peeling can directly introduce new sibling loops by peeling one
1168 // iteration.
1169 //
1170 // When a new loop appears as a sibling loop, either from peeling an
1171 // iteration or fully unrolling, its nesting structure has fundamentally
1172 // changed and we want to revisit it to reflect that.
1173 //
1174 // When unrolling has removed the current loop, we need to tell the
1175 // infrastructure that it is gone.
1176 //
1177 // Finally, we support a debugging/testing mode where we revisit child loops
1178 // as well. These are not expected to require further optimizations as either
1179 // they or the loop they were cloned from have been directly visited already.
1180 // But the debugging mode allows us to check this assumption.
1181 bool IsCurrentLoopValid = false;
1182 SmallVector<Loop *, 4> SibLoops;
1183 if (ParentL)
1184 SibLoops.append(ParentL->begin(), ParentL->end());
1185 else
1186 SibLoops.append(AR.LI.begin(), AR.LI.end());
1187 erase_if(SibLoops, [&](Loop *SibLoop) {
1188 if (SibLoop == &L) {
1189 IsCurrentLoopValid = true;
1190 return true;
1191 }
1192
1193 // Otherwise erase the loop from the list if it was in the old loops.
1194 return OldLoops.count(SibLoop) != 0;
1195 });
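  // At this point SibLoops contains exactly the loops at L's level that did
  // not exist before unrolling; L itself has been filtered out above, with
  // IsCurrentLoopValid recording whether it is still part of the loop nest.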
1196 Updater.addSiblingLoops(SibLoops);
1197
1198 if (!IsCurrentLoopValid) {
1199 Updater.markLoopAsDeleted(L);
1200 } else {
1201 // We can only walk child loops if the current loop remained valid.
1202 if (UnrollRevisitChildLoops) {
1203 // Walk *all* of the child loops. This is a highly speculative mode
1204      // anyway, so look for any simplifications that arose from partial
1205 // unrolling or peeling off of iterations.
1206 SmallVector<Loop *, 4> ChildLoops(L.begin(), L.end());
1207 Updater.addChildLoops(ChildLoops);
1208 }
1209 }
1210
Sean Silvae3c18a52016-07-19 23:54:23 +00001211 return getLoopPassPreservedAnalyses();
1212}