//===-- LoopUnroll.cpp - Loop unroller pass -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements a simple loop unroller.  It works best when loops have
// been canonicalized by the -indvars pass, allowing it to determine the trip
// counts of loops easily.
//===----------------------------------------------------------------------===//
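
// A quick illustrative sketch (not taken from any particular test): with a
// known trip count of 4 and an unroll factor of 4,
//
//   for (int i = 0; i < 4; ++i)
//     a[i] += b[i];
//
// becomes straight-line code with no back edge:
//
//   a[0] += b[0]; a[1] += b[1]; a[2] += b[2]; a[3] += b[3];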
| 14 | |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 15 | #include "llvm/Transforms/Scalar/LoopUnrollPass.h" |
Chandler Carruth | 3b057b3 | 2015-02-13 03:57:40 +0000 | [diff] [blame] | 16 | #include "llvm/ADT/SetVector.h" |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 17 | #include "llvm/Analysis/AssumptionCache.h" |
Chris Lattner | 679572e | 2011-01-02 07:35:53 +0000 | [diff] [blame] | 18 | #include "llvm/Analysis/CodeMetrics.h" |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 19 | #include "llvm/Analysis/GlobalsModRef.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 20 | #include "llvm/Analysis/InstructionSimplify.h" |
Chandler Carruth | ed0881b | 2012-12-03 16:50:05 +0000 | [diff] [blame] | 21 | #include "llvm/Analysis/LoopPass.h" |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 22 | #include "llvm/Analysis/LoopPassManager.h" |
Michael Zolotukhin | 1da4afd | 2016-02-08 23:03:59 +0000 | [diff] [blame] | 23 | #include "llvm/Analysis/LoopUnrollAnalyzer.h" |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 24 | #include "llvm/Analysis/OptimizationDiagnosticInfo.h" |
Dan Gohman | 0141c13 | 2010-07-26 18:11:16 +0000 | [diff] [blame] | 25 | #include "llvm/Analysis/ScalarEvolution.h" |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 26 | #include "llvm/Analysis/ScalarEvolutionExpressions.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 27 | #include "llvm/IR/DataLayout.h" |
Chandler Carruth | 5ad5f15 | 2014-01-13 09:26:24 +0000 | [diff] [blame] | 28 | #include "llvm/IR/Dominators.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 29 | #include "llvm/IR/InstVisitor.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 30 | #include "llvm/IR/IntrinsicInst.h" |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 31 | #include "llvm/IR/Metadata.h" |
Reid Spencer | 7c16caa | 2004-09-01 22:55:40 +0000 | [diff] [blame] | 32 | #include "llvm/Support/CommandLine.h" |
| 33 | #include "llvm/Support/Debug.h" |
Daniel Dunbar | 0dd5e1e | 2009-07-25 00:23:56 +0000 | [diff] [blame] | 34 | #include "llvm/Support/raw_ostream.h" |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 35 | #include "llvm/Transforms/Scalar.h" |
Chandler Carruth | 31088a9 | 2016-02-19 10:45:18 +0000 | [diff] [blame] | 36 | #include "llvm/Transforms/Utils/LoopUtils.h" |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 37 | #include "llvm/Transforms/Utils/UnrollLoop.h" |
Duncan Sands | 67933e6 | 2008-05-16 09:30:00 +0000 | [diff] [blame] | 38 | #include <climits> |
Benjamin Kramer | 82de7d3 | 2016-05-27 14:27:24 +0000 | [diff] [blame] | 39 | #include <utility> |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 40 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 41 | using namespace llvm; |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 42 | |
Chandler Carruth | 964daaa | 2014-04-22 02:55:47 +0000 | [diff] [blame] | 43 | #define DEBUG_TYPE "loop-unroll" |
| 44 | |
static cl::opt<unsigned>
    UnrollThreshold("unroll-threshold", cl::Hidden,
                    cl::desc("The baseline cost threshold for loop unrolling"));

static cl::opt<unsigned> UnrollMaxPercentThresholdBoost(
    "unroll-max-percent-threshold-boost", cl::init(400), cl::Hidden,
    cl::desc("The maximum 'boost' (represented as a percentage >= 100) applied "
             "to the threshold when aggressively unrolling a loop due to the "
             "dynamic cost savings. If completely unrolling a loop will reduce "
             "the total runtime from X to Y, we boost the loop unroll "
             "threshold to DefaultThreshold*std::min(MaxPercentThresholdBoost, "
             "X/Y). This limit avoids excessive code bloat."));

static cl::opt<unsigned> UnrollMaxIterationsCountToAnalyze(
    "unroll-max-iteration-count-to-analyze", cl::init(10), cl::Hidden,
    cl::desc("Don't allow loop unrolling to simulate more than this number of "
             "iterations when checking full unroll profitability"));

static cl::opt<unsigned> UnrollCount(
    "unroll-count", cl::Hidden,
    cl::desc("Use this unroll count for all loops including those with "
             "unroll_count pragma values, for testing purposes"));

static cl::opt<unsigned> UnrollMaxCount(
    "unroll-max-count", cl::Hidden,
    cl::desc("Set the max unroll count for partial and runtime unrolling, for "
             "testing purposes"));

static cl::opt<unsigned> UnrollFullMaxCount(
    "unroll-full-max-count", cl::Hidden,
    cl::desc(
        "Set the max unroll count for full unrolling, for testing purposes"));

static cl::opt<bool>
    UnrollAllowPartial("unroll-allow-partial", cl::Hidden,
                       cl::desc("Allows loops to be partially unrolled until "
                                "-unroll-threshold loop size is reached."));

static cl::opt<bool> UnrollAllowRemainder(
    "unroll-allow-remainder", cl::Hidden,
    cl::desc("Allow generation of a loop remainder (extra iterations) "
             "when unrolling a loop."));

static cl::opt<bool>
    UnrollRuntime("unroll-runtime", cl::ZeroOrMore, cl::Hidden,
                  cl::desc("Unroll loops with run-time trip counts"));

static cl::opt<unsigned> UnrollMaxUpperBound(
    "unroll-max-upperbound", cl::init(8), cl::Hidden,
    cl::desc(
        "The maximum trip count upper bound that is considered in unrolling"));

static cl::opt<unsigned> PragmaUnrollThreshold(
    "pragma-unroll-threshold", cl::init(16 * 1024), cl::Hidden,
    cl::desc("Unrolled size limit for loops with an unroll(full) or "
             "unroll_count pragma."));

static cl::opt<unsigned> FlatLoopTripCountThreshold(
    "flat-loop-tripcount-threshold", cl::init(5), cl::Hidden,
    cl::desc("If the runtime tripcount for the loop is lower than the "
             "threshold, the loop is considered as flat and will be less "
             "aggressively unrolled."));

static cl::opt<bool>
    UnrollAllowPeeling("unroll-allow-peeling", cl::Hidden,
                       cl::desc("Allows loops to be peeled when the dynamic "
                                "trip count is known to be low."));

/// A magic value for use with the Threshold parameter to indicate
/// that the loop unroll should be performed regardless of how much
/// code expansion would result.
static const unsigned NoThreshold = UINT_MAX;

/// Gather the various unrolling parameters based on the defaults, compiler
/// flags, TTI overrides and user specified parameters.
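///
/// The sources are applied in order of increasing priority: built-in defaults,
/// target (TTI) overrides, function size attributes, cl::opt flags, and
/// finally the explicit arguments passed in by the caller.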
static TargetTransformInfo::UnrollingPreferences gatherUnrollingPreferences(
    Loop *L, const TargetTransformInfo &TTI, Optional<unsigned> UserThreshold,
    Optional<unsigned> UserCount, Optional<bool> UserAllowPartial,
    Optional<bool> UserRuntime, Optional<bool> UserUpperBound) {
  TargetTransformInfo::UnrollingPreferences UP;

  // Set up the defaults
  UP.Threshold = 150;
  UP.MaxPercentThresholdBoost = 400;
  UP.OptSizeThreshold = 0;
  UP.PartialThreshold = UP.Threshold;
  UP.PartialOptSizeThreshold = 0;
  UP.Count = 0;
  UP.PeelCount = 0;
  UP.DefaultUnrollRuntimeCount = 8;
  UP.MaxCount = UINT_MAX;
  UP.FullUnrollMaxCount = UINT_MAX;
  UP.BEInsns = 2;
  UP.Partial = false;
  UP.Runtime = false;
  UP.AllowRemainder = true;
  UP.AllowExpensiveTripCount = false;
  UP.Force = false;
  UP.UpperBound = false;
  UP.AllowPeeling = false;

  // Override with any target specific settings
  TTI.getUnrollingPreferences(L, UP);

  // Apply size attributes
  if (L->getHeader()->getParent()->optForSize()) {
    UP.Threshold = UP.OptSizeThreshold;
    UP.PartialThreshold = UP.PartialOptSizeThreshold;
  }

  // Apply any user values specified by cl::opt
  if (UnrollThreshold.getNumOccurrences() > 0) {
    UP.Threshold = UnrollThreshold;
    UP.PartialThreshold = UnrollThreshold;
  }
  if (UnrollMaxPercentThresholdBoost.getNumOccurrences() > 0)
    UP.MaxPercentThresholdBoost = UnrollMaxPercentThresholdBoost;
  if (UnrollMaxCount.getNumOccurrences() > 0)
    UP.MaxCount = UnrollMaxCount;
  if (UnrollFullMaxCount.getNumOccurrences() > 0)
    UP.FullUnrollMaxCount = UnrollFullMaxCount;
  if (UnrollAllowPartial.getNumOccurrences() > 0)
    UP.Partial = UnrollAllowPartial;
  if (UnrollAllowRemainder.getNumOccurrences() > 0)
    UP.AllowRemainder = UnrollAllowRemainder;
  if (UnrollRuntime.getNumOccurrences() > 0)
    UP.Runtime = UnrollRuntime;
  if (UnrollMaxUpperBound == 0)
    UP.UpperBound = false;
  if (UnrollAllowPeeling.getNumOccurrences() > 0)
    UP.AllowPeeling = UnrollAllowPeeling;

  // Apply user values provided by argument
  if (UserThreshold.hasValue()) {
    UP.Threshold = *UserThreshold;
    UP.PartialThreshold = *UserThreshold;
  }
  if (UserCount.hasValue())
    UP.Count = *UserCount;
  if (UserAllowPartial.hasValue())
    UP.Partial = *UserAllowPartial;
  if (UserRuntime.hasValue())
    UP.Runtime = *UserRuntime;
  if (UserUpperBound.hasValue())
    UP.UpperBound = *UserUpperBound;

  return UP;
}

namespace {
/// A struct to densely store the state of an instruction after unrolling at
/// each iteration.
///
/// This is designed to work like a tuple of <Instruction *, int> for the
/// purposes of hashing and lookup, but to be able to associate two boolean
/// states with each key.
struct UnrolledInstState {
  Instruction *I;
  int Iteration : 30;
  unsigned IsFree : 1;
  unsigned IsCounted : 1;
};

/// Hashing and equality testing for a set of the instruction states.
struct UnrolledInstStateKeyInfo {
  typedef DenseMapInfo<Instruction *> PtrInfo;
  typedef DenseMapInfo<std::pair<Instruction *, int>> PairInfo;
  static inline UnrolledInstState getEmptyKey() {
    return {PtrInfo::getEmptyKey(), 0, 0, 0};
  }
  static inline UnrolledInstState getTombstoneKey() {
    return {PtrInfo::getTombstoneKey(), 0, 0, 0};
  }
  static inline unsigned getHashValue(const UnrolledInstState &S) {
    return PairInfo::getHashValue({S.I, S.Iteration});
  }
  static inline bool isEqual(const UnrolledInstState &LHS,
                             const UnrolledInstState &RHS) {
    return PairInfo::isEqual({LHS.I, LHS.Iteration}, {RHS.I, RHS.Iteration});
  }
};
}

namespace {
struct EstimatedUnrollCost {
  /// \brief The estimated cost after unrolling.
  unsigned UnrolledCost;

  /// \brief The estimated dynamic cost of executing the instructions in the
  /// rolled form.
  unsigned RolledDynamicCost;
};
}

/// \brief Figure out if the loop is worth full unrolling.
///
/// Complete loop unrolling can make some loads constant, and we need to know
/// if that would expose any further optimization opportunities. This routine
/// estimates the benefit of that optimization. It computes the cost of the
/// unrolled loop (UnrolledCost) and the dynamic cost of the original loop
/// (RolledDynamicCost). By dynamic cost we mean that we won't count costs of
/// blocks that are known not to be executed (i.e. if we have a branch in the
/// loop and we know that at the given iteration its condition would be
/// resolved to true, we won't add up the cost of the 'false'-block).
/// \returns Optional value, holding the RolledDynamicCost and UnrolledCost. If
/// the analysis failed (no benefits expected from the unrolling, or the loop is
/// too big to analyze), the returned value is None.
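///
/// As a hypothetical illustration (not a test from this file): if 'Weights' is
/// a constant global array and the loop body is 'Sum += Weights[i]', then after
/// full unrolling every load of Weights[i] folds to a constant, so the
/// UnrolledCost comes out well below the RolledDynamicCost.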
static Optional<EstimatedUnrollCost>
analyzeLoopUnrollCost(const Loop *L, unsigned TripCount, DominatorTree &DT,
                      ScalarEvolution &SE, const TargetTransformInfo &TTI,
                      unsigned MaxUnrolledLoopSize) {
  // We want to be able to scale offsets by the trip count and add more offsets
  // to them without checking for overflows, and we already don't want to
  // analyze *massive* trip counts, so we force the max to be reasonably small.
  assert(UnrollMaxIterationsCountToAnalyze < (INT_MAX / 2) &&
         "The unroll iterations max is too large!");

  // Only analyze inner loops. We can't properly estimate cost of nested loops
  // and we won't visit inner loops again anyway.
  if (!L->empty())
    return None;

  // Don't simulate loops with a big or unknown tripcount
  if (!UnrollMaxIterationsCountToAnalyze || !TripCount ||
      TripCount > UnrollMaxIterationsCountToAnalyze)
    return None;

  SmallSetVector<BasicBlock *, 16> BBWorklist;
  SmallSetVector<std::pair<BasicBlock *, BasicBlock *>, 4> ExitWorklist;
  DenseMap<Value *, Constant *> SimplifiedValues;
  SmallVector<std::pair<Value *, Constant *>, 4> SimplifiedInputValues;

  // The estimated cost of the unrolled form of the loop. We try to estimate
  // this by simplifying as much as we can while computing the estimate.
  unsigned UnrolledCost = 0;

  // We also track the estimated dynamic (that is, actually executed) cost in
  // the rolled form. This helps identify cases when the savings from unrolling
  // aren't just exposing dead control flows, but actual reduced dynamic
  // instructions due to the simplifications which we expect to occur after
  // unrolling.
  unsigned RolledDynamicCost = 0;

  // We track the simplification of each instruction in each iteration. We use
  // this to recursively merge costs into the unrolled cost on-demand so that
  // we don't count the cost of any dead code. This is essentially a map from
  // <instruction, int> to <bool, bool>, but stored as a densely packed struct.
  DenseSet<UnrolledInstState, UnrolledInstStateKeyInfo> InstCostMap;

  // A small worklist used to accumulate cost of instructions from each
  // observable and reached root in the loop.
  SmallVector<Instruction *, 16> CostWorklist;

  // PHI-used worklist used between iterations while accumulating cost.
  SmallVector<Instruction *, 4> PHIUsedList;

  // Helper function to accumulate cost for instructions in the loop.
  auto AddCostRecursively = [&](Instruction &RootI, int Iteration) {
    assert(Iteration >= 0 && "Cannot have a negative iteration!");
    assert(CostWorklist.empty() && "Must start with an empty cost list");
    assert(PHIUsedList.empty() && "Must start with an empty phi used list");
    CostWorklist.push_back(&RootI);
    for (;; --Iteration) {
      do {
        Instruction *I = CostWorklist.pop_back_val();

        // InstCostMap only uses I and Iteration as a key; the other two values
        // don't matter here.
        auto CostIter = InstCostMap.find({I, Iteration, 0, 0});
        if (CostIter == InstCostMap.end())
          // If an input to a PHI node comes from a dead path through the loop
          // we may have no cost data for it here. What that actually means is
          // that it is free.
          continue;
        auto &Cost = *CostIter;
        if (Cost.IsCounted)
          // Already counted this instruction.
          continue;

        // Mark that we are counting the cost of this instruction now.
        Cost.IsCounted = true;

        // If this is a PHI node in the loop header, just add it to the PHI set.
        if (auto *PhiI = dyn_cast<PHINode>(I))
          if (PhiI->getParent() == L->getHeader()) {
            assert(Cost.IsFree && "Loop PHIs shouldn't be evaluated as they "
                                  "inherently simplify during unrolling.");
            if (Iteration == 0)
              continue;

            // Push the incoming value from the backedge into the PHI used list
            // if it is an in-loop instruction. We'll use this to populate the
            // cost worklist for the next iteration (as we count backwards).
            if (auto *OpI = dyn_cast<Instruction>(
                    PhiI->getIncomingValueForBlock(L->getLoopLatch())))
              if (L->contains(OpI))
                PHIUsedList.push_back(OpI);
            continue;
          }

        // First accumulate the cost of this instruction.
        if (!Cost.IsFree) {
          UnrolledCost += TTI.getUserCost(I);
          DEBUG(dbgs() << "Adding cost of instruction (iteration " << Iteration
                       << "): ");
          DEBUG(I->dump());
        }

        // We must count the cost of every operand which is not free,
        // recursively. If we reach a loop PHI node, simply add it to the set
        // to be considered on the next iteration (backwards!).
        for (Value *Op : I->operands()) {
          // Check whether this operand is free due to being a constant or
          // outside the loop.
          auto *OpI = dyn_cast<Instruction>(Op);
          if (!OpI || !L->contains(OpI))
            continue;

          // Otherwise accumulate its cost.
          CostWorklist.push_back(OpI);
        }
      } while (!CostWorklist.empty());

      if (PHIUsedList.empty())
        // We've exhausted the search.
        break;

      assert(Iteration > 0 &&
             "Cannot track PHI-used values past the first iteration!");
      CostWorklist.append(PHIUsedList.begin(), PHIUsedList.end());
      PHIUsedList.clear();
    }
  };

  // Ensure that we don't violate the loop structure invariants relied on by
  // this analysis.
  assert(L->isLoopSimplifyForm() && "Must put loop into normal form first.");
  assert(L->isLCSSAForm(DT) &&
         "Must have loops in LCSSA form to track live-out values.");

  DEBUG(dbgs() << "Starting LoopUnroll profitability analysis...\n");

  // Simulate execution of each iteration of the loop counting instructions,
  // which would be simplified.
  // Since the same load will take different values on different iterations,
  // we literally have to go through all loop's iterations.
  for (unsigned Iteration = 0; Iteration < TripCount; ++Iteration) {
    DEBUG(dbgs() << " Analyzing iteration " << Iteration << "\n");

    // Prepare for the iteration by collecting any simplified entry or backedge
    // inputs.
    for (Instruction &I : *L->getHeader()) {
      auto *PHI = dyn_cast<PHINode>(&I);
      if (!PHI)
        break;

      // The loop header PHI nodes must have exactly two inputs: one from the
      // loop preheader and one from the loop latch.
      assert(
          PHI->getNumIncomingValues() == 2 &&
          "Must have an incoming value only for the preheader and the latch.");

      Value *V = PHI->getIncomingValueForBlock(
          Iteration == 0 ? L->getLoopPreheader() : L->getLoopLatch());
      Constant *C = dyn_cast<Constant>(V);
      if (Iteration != 0 && !C)
        C = SimplifiedValues.lookup(V);
      if (C)
        SimplifiedInputValues.push_back({PHI, C});
    }

    // Now clear and re-populate the map for the next iteration.
    SimplifiedValues.clear();
    while (!SimplifiedInputValues.empty())
      SimplifiedValues.insert(SimplifiedInputValues.pop_back_val());

    UnrolledInstAnalyzer Analyzer(Iteration, SimplifiedValues, SE, L);

    BBWorklist.clear();
    BBWorklist.insert(L->getHeader());
    // Note that we *must not* cache the size; this loop grows the worklist.
    for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
      BasicBlock *BB = BBWorklist[Idx];

      // Visit all instructions in the given basic block and try to simplify
      // them. We don't change the actual IR, just count optimization
      // opportunities.
      for (Instruction &I : *BB) {
        if (isa<DbgInfoIntrinsic>(I))
          continue;

        // Track this instruction's expected baseline cost when executing the
        // rolled loop form.
        RolledDynamicCost += TTI.getUserCost(&I);

        // Visit the instruction to analyze its loop cost after unrolling,
        // and if the visitor returns true, mark the instruction as free after
        // unrolling and continue.
        bool IsFree = Analyzer.visit(I);
        bool Inserted = InstCostMap.insert({&I, (int)Iteration,
                                            (unsigned)IsFree,
                                            /*IsCounted*/ false}).second;
        (void)Inserted;
        assert(Inserted && "Cannot have a state for an unvisited instruction!");

        if (IsFree)
          continue;

        // Can't properly model the cost of a call.
        // FIXME: With a proper cost model we should be able to do it.
        if (isa<CallInst>(&I))
          return None;

        // If the instruction might have a side-effect, recursively account for
        // the cost of it and all the instructions leading up to it.
        if (I.mayHaveSideEffects())
          AddCostRecursively(I, Iteration);

        // If the unrolled body turns out to be too big, bail out.
        if (UnrolledCost > MaxUnrolledLoopSize) {
          DEBUG(dbgs() << "  Exceeded threshold.. exiting.\n"
                       << "  UnrolledCost: " << UnrolledCost
                       << ", MaxUnrolledLoopSize: " << MaxUnrolledLoopSize
                       << "\n");
          return None;
        }
      }

      TerminatorInst *TI = BB->getTerminator();

      // Add in the live successors by first checking whether we have a
      // terminator that may be simplified based on the values simplified by
      // this call.
      BasicBlock *KnownSucc = nullptr;
      if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
        if (BI->isConditional()) {
          if (Constant *SimpleCond =
                  SimplifiedValues.lookup(BI->getCondition())) {
            // Just take the first successor if the condition is undef.
            if (isa<UndefValue>(SimpleCond))
              KnownSucc = BI->getSuccessor(0);
            else if (ConstantInt *SimpleCondVal =
                         dyn_cast<ConstantInt>(SimpleCond))
              KnownSucc = BI->getSuccessor(SimpleCondVal->isZero() ? 1 : 0);
          }
        }
      } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
        if (Constant *SimpleCond =
                SimplifiedValues.lookup(SI->getCondition())) {
          // Just take the first successor if the condition is undef.
          if (isa<UndefValue>(SimpleCond))
            KnownSucc = SI->getSuccessor(0);
          else if (ConstantInt *SimpleCondVal =
                       dyn_cast<ConstantInt>(SimpleCond))
            KnownSucc = SI->findCaseValue(SimpleCondVal).getCaseSuccessor();
        }
      }
      if (KnownSucc) {
        if (L->contains(KnownSucc))
          BBWorklist.insert(KnownSucc);
        else
          ExitWorklist.insert({BB, KnownSucc});
        continue;
      }

      // Add BB's successors to the worklist.
      for (BasicBlock *Succ : successors(BB))
        if (L->contains(Succ))
          BBWorklist.insert(Succ);
        else
          ExitWorklist.insert({BB, Succ});
      AddCostRecursively(*TI, Iteration);
    }

    // If we found no optimization opportunities on the first iteration, we
    // won't find them on later ones either.
    if (UnrolledCost == RolledDynamicCost) {
      DEBUG(dbgs() << "  No opportunities found.. exiting.\n"
                   << "  UnrolledCost: " << UnrolledCost << "\n");
      return None;
    }
  }

  while (!ExitWorklist.empty()) {
    BasicBlock *ExitingBB, *ExitBB;
    std::tie(ExitingBB, ExitBB) = ExitWorklist.pop_back_val();

    for (Instruction &I : *ExitBB) {
      auto *PN = dyn_cast<PHINode>(&I);
      if (!PN)
        break;

      Value *Op = PN->getIncomingValueForBlock(ExitingBB);
      if (auto *OpI = dyn_cast<Instruction>(Op))
        if (L->contains(OpI))
          AddCostRecursively(*OpI, TripCount - 1);
    }
  }

  DEBUG(dbgs() << "Analysis finished:\n"
               << "UnrolledCost: " << UnrolledCost << ", "
               << "RolledDynamicCost: " << RolledDynamicCost << "\n");
  return {{UnrolledCost, RolledDynamicCost}};
}

/// ApproximateLoopSize - Approximate the size of the loop.
static unsigned ApproximateLoopSize(const Loop *L, unsigned &NumCalls,
                                    bool &NotDuplicatable, bool &Convergent,
                                    const TargetTransformInfo &TTI,
                                    AssumptionCache *AC, unsigned BEInsns) {
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(L, AC, EphValues);

  CodeMetrics Metrics;
  for (BasicBlock *BB : L->blocks())
    Metrics.analyzeBasicBlock(BB, TTI, EphValues);
  NumCalls = Metrics.NumInlineCandidates;
  NotDuplicatable = Metrics.notDuplicatable;
  Convergent = Metrics.convergent;

  unsigned LoopSize = Metrics.NumInsts;

  // Don't allow an estimate of size zero. This would allow unrolling of loops
  // with huge iteration counts, which is a compile time problem even if it's
  // not a problem for code quality. Also, the code using this size may assume
  // that each loop has at least three instructions (likely a conditional
  // branch, a comparison feeding that branch, and some kind of loop increment
  // feeding that comparison instruction).
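  // For example, with the default BEInsns value of 2 the estimate below is
  // clamped to at least 3 instructions.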
  LoopSize = std::max(LoopSize, BEInsns + 1);

  return LoopSize;
}

// Returns the loop hint metadata node with the given name (for example,
// "llvm.loop.unroll.count"). If no such metadata node exists, then nullptr is
// returned.
static MDNode *GetUnrollMetadataForLoop(const Loop *L, StringRef Name) {
  if (MDNode *LoopID = L->getLoopID())
    return GetUnrollMetadata(LoopID, Name);
  return nullptr;
}

// Returns true if the loop has an unroll(full) pragma.
static bool HasUnrollFullPragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.full");
}

// Returns true if the loop has an unroll(enable) pragma. This metadata is used
// for both "#pragma unroll" and "#pragma clang loop unroll(enable)" directives.
static bool HasUnrollEnablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.enable");
}

// Returns true if the loop has an unroll(disable) pragma.
static bool HasUnrollDisablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.disable");
}

// Returns true if the loop has a runtime unroll(disable) pragma.
static bool HasRuntimeUnrollDisablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.runtime.disable");
}

// If the loop has an unroll_count pragma, return the (necessarily
// positive) value from the pragma. Otherwise return 0.
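// As an illustrative sketch (IR not taken from this file), the hint emitted
// for "#pragma clang loop unroll_count(4)" typically looks like:
//
//   br i1 %exitcond, label %exit, label %loop.body, !llvm.loop !0
//   !0 = distinct !{!0, !1}
//   !1 = !{!"llvm.loop.unroll.count", i32 4}
//
// so operand 1 of the hint node carries the requested count.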
static unsigned UnrollCountPragmaValue(const Loop *L) {
  MDNode *MD = GetUnrollMetadataForLoop(L, "llvm.loop.unroll.count");
  if (MD) {
    assert(MD->getNumOperands() == 2 &&
           "Unroll count hint metadata should have two operands.");
    unsigned Count =
        mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue();
    assert(Count >= 1 && "Unroll count must be positive.");
    return Count;
  }
  return 0;
}

// Remove existing unroll metadata and add unroll disable metadata to
// indicate the loop has already been unrolled. This prevents a loop
// from being unrolled more than is directed by a pragma if the loop
// unrolling pass is run more than once (which it generally is).
static void SetLoopAlreadyUnrolled(Loop *L) {
  MDNode *LoopID = L->getLoopID();
  // First remove any existing loop unrolling metadata.
  SmallVector<Metadata *, 4> MDs;
  // Reserve first location for self reference to the LoopID metadata node.
  MDs.push_back(nullptr);

  if (LoopID) {
    for (unsigned i = 1, ie = LoopID->getNumOperands(); i < ie; ++i) {
      bool IsUnrollMetadata = false;
      MDNode *MD = dyn_cast<MDNode>(LoopID->getOperand(i));
      if (MD) {
        const MDString *S = dyn_cast<MDString>(MD->getOperand(0));
        IsUnrollMetadata = S && S->getString().startswith("llvm.loop.unroll.");
      }
      if (!IsUnrollMetadata)
        MDs.push_back(LoopID->getOperand(i));
    }
  }

  // Add unroll(disable) metadata to disable future unrolling.
  LLVMContext &Context = L->getHeader()->getContext();
  SmallVector<Metadata *, 1> DisableOperands;
  DisableOperands.push_back(MDString::get(Context, "llvm.loop.unroll.disable"));
  MDNode *DisableNode = MDNode::get(Context, DisableOperands);
  MDs.push_back(DisableNode);

  MDNode *NewLoopID = MDNode::get(Context, MDs);
  // Set operand 0 to refer to the loop id itself.
  NewLoopID->replaceOperandWith(0, NewLoopID);
  L->setLoopID(NewLoopID);
}

// Computes the boosting factor for complete unrolling.
// If fully unrolling the loop would save a lot of RolledDynamicCost, it would
// be beneficial to fully unroll the loop even if UnrolledCost is large. We
// use (RolledDynamicCost / UnrolledCost) to model the unroll benefits to adjust
// the unroll threshold.
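// For example (illustrative numbers only): with UnrolledCost = 50 and
// RolledDynamicCost = 200, the boost is min(100 * 200 / 50, Max) = min(400,
// Max), so with the default -unroll-max-percent-threshold-boost of 400 the
// threshold is quadrupled.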
static unsigned getFullUnrollBoostingFactor(const EstimatedUnrollCost &Cost,
                                            unsigned MaxPercentThresholdBoost) {
  if (Cost.RolledDynamicCost >= UINT_MAX / 100)
    return 100;
  else if (Cost.UnrolledCost != 0)
    // The boosting factor is RolledDynamicCost / UnrolledCost
    return std::min(100 * Cost.RolledDynamicCost / Cost.UnrolledCost,
                    MaxPercentThresholdBoost);
  else
    return MaxPercentThresholdBoost;
}

// Returns a loop size estimate for the unrolled loop.
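// For example (illustrative numbers only): with LoopSize = 12, UP.BEInsns = 2
// and UP.Count = 4, the estimate is (12 - 2) * 4 + 2 = 42, reflecting that the
// backedge instructions are not replicated per iteration.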
static uint64_t getUnrolledLoopSize(
    unsigned LoopSize,
    TargetTransformInfo::UnrollingPreferences &UP) {
  assert(LoopSize >= UP.BEInsns && "LoopSize should not be less than BEInsns!");
  return (uint64_t)(LoopSize - UP.BEInsns) * UP.Count + UP.BEInsns;
}

// Returns true if unroll count was set explicitly.
// Calculates unroll count and writes it to UP.Count.
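// The count is chosen by walking through the priorities spelled out below: an
// explicit -unroll-count option, then a pragma count, then full unrolling
// (exact or upper-bound trip count), then partial unrolling, then peeling, and
// finally runtime unrolling.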
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 686 | static bool computeUnrollCount( |
| 687 | Loop *L, const TargetTransformInfo &TTI, DominatorTree &DT, LoopInfo *LI, |
| 688 | ScalarEvolution *SE, OptimizationRemarkEmitter *ORE, unsigned &TripCount, |
| 689 | unsigned MaxTripCount, unsigned &TripMultiple, unsigned LoopSize, |
| 690 | TargetTransformInfo::UnrollingPreferences &UP, bool &UseUpperBound) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 691 | // Check for explicit Count. |
| 692 | // 1st priority is unroll count set by "unroll-count" option. |
| 693 | bool UserUnrollCount = UnrollCount.getNumOccurrences() > 0; |
| 694 | if (UserUnrollCount) { |
| 695 | UP.Count = UnrollCount; |
| 696 | UP.AllowExpensiveTripCount = true; |
| 697 | UP.Force = true; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 698 | if (UP.AllowRemainder && getUnrolledLoopSize(LoopSize, UP) < UP.Threshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 699 | return true; |
| 700 | } |
| 701 | |
| 702 | // 2nd priority is unroll count set by pragma. |
| 703 | unsigned PragmaCount = UnrollCountPragmaValue(L); |
| 704 | if (PragmaCount > 0) { |
| 705 | UP.Count = PragmaCount; |
| 706 | UP.Runtime = true; |
| 707 | UP.AllowExpensiveTripCount = true; |
| 708 | UP.Force = true; |
| 709 | if (UP.AllowRemainder && |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 710 | getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 711 | return true; |
| 712 | } |
| 713 | bool PragmaFullUnroll = HasUnrollFullPragma(L); |
| 714 | if (PragmaFullUnroll && TripCount != 0) { |
| 715 | UP.Count = TripCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 716 | if (getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 717 | return false; |
| 718 | } |
| 719 | |
| 720 | bool PragmaEnableUnroll = HasUnrollEnablePragma(L); |
| 721 | bool ExplicitUnroll = PragmaCount > 0 || PragmaFullUnroll || |
| 722 | PragmaEnableUnroll || UserUnrollCount; |
| 723 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 724 | if (ExplicitUnroll && TripCount != 0) { |
| 725 | // If the loop has an unrolling pragma, we want to be more aggressive with |
| 726 | // unrolling limits. Set thresholds to at least the PragmaThreshold value |
| 727 | // which is larger than the default limits. |
| 728 | UP.Threshold = std::max<unsigned>(UP.Threshold, PragmaUnrollThreshold); |
| 729 | UP.PartialThreshold = |
| 730 | std::max<unsigned>(UP.PartialThreshold, PragmaUnrollThreshold); |
| 731 | } |
| 732 | |
| 733 | // 3rd priority is full unroll count. |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 734 | // Full unroll makes sense only when TripCount or its upper bound could be |
| 735 | // statically calculated. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 736 | // Also we need to check if we exceed FullUnrollMaxCount. |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 737 | // If using the upper bound to unroll, TripMultiple should be set to 1 because |
| 738 | // we do not know when loop may exit. |
| 739 | // MaxTripCount and ExactTripCount cannot both be non zero since we only |
| 740 | // compute the former when the latter is zero. |
| 741 | unsigned ExactTripCount = TripCount; |
| 742 | assert((ExactTripCount == 0 || MaxTripCount == 0) && |
| 743 | "ExtractTripCound and MaxTripCount cannot both be non zero."); |
| 744 | unsigned FullUnrollTripCount = ExactTripCount ? ExactTripCount : MaxTripCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 745 | UP.Count = FullUnrollTripCount; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 746 | if (FullUnrollTripCount && FullUnrollTripCount <= UP.FullUnrollMaxCount) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 747 | // When computing the unrolled size, note that BEInsns are not replicated |
| 748 | // like the rest of the loop body. |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 749 | if (getUnrolledLoopSize(LoopSize, UP) < UP.Threshold) { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 750 | UseUpperBound = (MaxTripCount == FullUnrollTripCount); |
| 751 | TripCount = FullUnrollTripCount; |
| 752 | TripMultiple = UP.UpperBound ? 1 : TripMultiple; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 753 | return ExplicitUnroll; |
| 754 | } else { |
| 755 | // The loop isn't that small, but we still can fully unroll it if that |
| 756 | // helps to remove a significant number of instructions. |
| 757 | // To check that, run additional analysis on the loop. |
| 758 | if (Optional<EstimatedUnrollCost> Cost = analyzeLoopUnrollCost( |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 759 | L, FullUnrollTripCount, DT, *SE, TTI, |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 760 | UP.Threshold * UP.MaxPercentThresholdBoost / 100)) { |
| 761 | unsigned Boost = |
| 762 | getFullUnrollBoostingFactor(*Cost, UP.MaxPercentThresholdBoost); |
| 763 | if (Cost->UnrolledCost < UP.Threshold * Boost / 100) { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 764 | UseUpperBound = (MaxTripCount == FullUnrollTripCount); |
| 765 | TripCount = FullUnrollTripCount; |
| 766 | TripMultiple = UP.UpperBound ? 1 : TripMultiple; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 767 | return ExplicitUnroll; |
| 768 | } |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 769 | } |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 770 | } |
| 771 | } |
| 772 | |
| 773 | // 4rd priority is partial unrolling. |
| 774 | // Try partial unroll only when TripCount could be staticaly calculated. |
| 775 | if (TripCount) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 776 | UP.Partial |= ExplicitUnroll; |
| 777 | if (!UP.Partial) { |
| 778 | DEBUG(dbgs() << " will not try to unroll partially because " |
| 779 | << "-unroll-allow-partial not given\n"); |
| 780 | UP.Count = 0; |
| 781 | return false; |
| 782 | } |
Haicheng Wu | 430b3e4 | 2016-10-27 18:40:02 +0000 | [diff] [blame] | 783 | if (UP.Count == 0) |
| 784 | UP.Count = TripCount; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 785 | if (UP.PartialThreshold != NoThreshold) { |
| 786 | // Reduce unroll count to be modulo of TripCount for partial unrolling. |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 787 | if (getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
| 788 | UP.Count = |
| 789 | (std::max(UP.PartialThreshold, UP.BEInsns + 1) - UP.BEInsns) / |
| 790 | (LoopSize - UP.BEInsns); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 791 | if (UP.Count > UP.MaxCount) |
| 792 | UP.Count = UP.MaxCount; |
| 793 | while (UP.Count != 0 && TripCount % UP.Count != 0) |
| 794 | UP.Count--; |
| 795 | if (UP.AllowRemainder && UP.Count <= 1) { |
| 796 | // If there is no Count that is modulo of TripCount, set Count to |
| 797 | // largest power-of-two factor that satisfies the threshold limit. |
| 798 | // As we'll create fixup loop, do the type of unrolling only if |
| 799 | // remainder loop is allowed. |
Jonas Paulsson | 58c5a7f | 2016-09-28 09:41:38 +0000 | [diff] [blame] | 800 | UP.Count = UP.DefaultUnrollRuntimeCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 801 | while (UP.Count != 0 && |
| 802 | getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 803 | UP.Count >>= 1; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 804 | } |
| 805 | if (UP.Count < 2) { |
| 806 | if (PragmaEnableUnroll) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 807 | ORE->emit( |
| 808 | OptimizationRemarkMissed(DEBUG_TYPE, "UnrollAsDirectedTooLarge", |
| 809 | L->getStartLoc(), L->getHeader()) |
| 810 | << "Unable to unroll loop as directed by unroll(enable) pragma " |
| 811 | "because unrolled size is too large."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 812 | UP.Count = 0; |
| 813 | } |
| 814 | } else { |
| 815 | UP.Count = TripCount; |
| 816 | } |
| 817 | if ((PragmaFullUnroll || PragmaEnableUnroll) && TripCount && |
| 818 | UP.Count != TripCount) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 819 | ORE->emit( |
| 820 | OptimizationRemarkMissed(DEBUG_TYPE, "FullUnrollAsDirectedTooLarge", |
| 821 | L->getStartLoc(), L->getHeader()) |
| 822 | << "Unable to fully unroll loop as directed by unroll pragma because " |
| 823 | "unrolled size is too large."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 824 | return ExplicitUnroll; |
| 825 | } |
| 826 | assert(TripCount == 0 && |
| 827 | "All cases when TripCount is constant should be covered here."); |
| 828 | if (PragmaFullUnroll) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 829 | ORE->emit( |
| 830 | OptimizationRemarkMissed(DEBUG_TYPE, |
| 831 | "CantFullUnrollAsDirectedRuntimeTripCount", |
| 832 | L->getStartLoc(), L->getHeader()) |
| 833 | << "Unable to fully unroll loop as directed by unroll(full) pragma " |
| 834 | "because loop has a runtime trip count."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 835 | |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 836 | // 5th priority is loop peeling. |
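| | // Peeling sketch (illustrative, assuming profile data suggests a small trip |
| | // count): the first PeelCount iterations are emitted straight-line ahead of |
| | // the loop, e.g. |
| | //   body(0); body(1);              // peeled copies |
| | //   for (i = 2; i < n; ++i) body(i); |
| | // so no unrolled body or remainder loop is needed (Count is set to 1 below). |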
| 837 | computePeelCount(L, LoopSize, UP); |
| 838 | if (UP.PeelCount) { |
| 839 | UP.Runtime = false; |
| 840 | UP.Count = 1; |
| 841 | return ExplicitUnroll; |
| 842 | } |
| 843 | |
| 844 | // 6th priority is runtime unrolling. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 845 | // Don't unroll a runtime trip count loop when it is disabled. |
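| | // (Roughly: runtime unrolling emits the unrolled body guarded by a trip count |
| | // computed at run time, plus a remainder loop for the leftover iterations, so |
| | // it applies even when TripCount is unknown at compile time.) |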
| 846 | if (HasRuntimeUnrollDisablePragma(L)) { |
| 847 | UP.Count = 0; |
| 848 | return false; |
| 849 | } |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 850 | |
| 851 | // Check if the runtime trip count is too small when a profile is available. |
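| | // (If the profile-estimated trip count falls below FlatLoopTripCountThreshold, |
| | // runtime unrolling is skipped entirely; otherwise the potentially expensive |
| | // runtime trip count computation is allowed via AllowExpensiveTripCount.) |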
| 852 | if (L->getHeader()->getParent()->getEntryCount()) { |
| 853 | if (auto ProfileTripCount = getLoopEstimatedTripCount(L)) { |
| 854 | if (*ProfileTripCount < FlatLoopTripCountThreshold) |
| 855 | return false; |
| 856 | else |
| 857 | UP.AllowExpensiveTripCount = true; |
| 858 | } |
| 859 | } |
| 860 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 861 | // Reduce count based on the type of unrolling and the threshold values. |
| 862 | UP.Runtime |= PragmaEnableUnroll || PragmaCount > 0 || UserUnrollCount; |
| 863 | if (!UP.Runtime) { |
| 864 | DEBUG(dbgs() << "  will not try to unroll loop with runtime trip count because " |
| 865 | << "-unroll-runtime not given\n"); |
| 866 | UP.Count = 0; |
| 867 | return false; |
| 868 | } |
| 869 | if (UP.Count == 0) |
Jonas Paulsson | 58c5a7f | 2016-09-28 09:41:38 +0000 | [diff] [blame] | 870 | UP.Count = UP.DefaultUnrollRuntimeCount; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 871 | |
| 872 | // Reduce unroll count to be the largest power-of-two factor of |
| 873 | // the original count which satisfies the threshold limit. |
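| | // e.g. (hypothetical sizes) if Count starts at 8 but only a factor of 4 keeps |
| | // the unrolled size under PartialThreshold, the loop below halves 8 -> 4. |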
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 874 | while (UP.Count != 0 && |
| 875 | getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 876 | UP.Count >>= 1; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 877 | |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 878 | #ifndef NDEBUG |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 879 | unsigned OrigCount = UP.Count; |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 880 | #endif |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 881 | |
| 882 | if (!UP.AllowRemainder && UP.Count != 0 && (TripMultiple % UP.Count) != 0) { |
| 883 | while (UP.Count != 0 && TripMultiple % UP.Count != 0) |
| 884 | UP.Count >>= 1; |
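| | // e.g. (illustrative) with TripMultiple = 4 and Count = 8, Count is halved to |
| | // 4 so that it divides the (unknown) trip count and no remainder is needed. |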
| 885 | DEBUG(dbgs() << "Remainder loop is restricted (that could architecture " |
| 886 | "specific or because the loop contains a convergent " |
| 887 | "instruction), so unroll count must divide the trip " |
| 888 | "multiple, " |
| 889 | << TripMultiple << ". Reducing unroll count from " |
| 890 | << OrigCount << " to " << UP.Count << ".\n"); |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 891 | using namespace ore; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 892 | if (PragmaCount > 0 && !UP.AllowRemainder) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 893 | ORE->emit( |
| 894 | OptimizationRemarkMissed(DEBUG_TYPE, |
| 895 | "DifferentUnrollCountFromDirected", |
| 896 | L->getStartLoc(), L->getHeader()) |
| 897 | << "Unable to unroll loop the number of times directed by " |
| 898 | "unroll_count pragma because remainder loop is restricted " |
| 899 | "(that could architecture specific or because the loop " |
| 900 | "contains a convergent instruction) and so must have an unroll " |
| 901 | "count that divides the loop trip multiple of " |
| 902 | << NV("TripMultiple", TripMultiple) << ". Unrolling instead " |
| 903 | << NV("UnrollCount", UP.Count) << " time(s)."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 904 | } |
| 905 | |
| 906 | if (UP.Count > UP.MaxCount) |
| 907 | UP.Count = UP.MaxCount; |
| 908 | DEBUG(dbgs() << " partially unrolling with count: " << UP.Count << "\n"); |
| 909 | if (UP.Count < 2) |
| 910 | UP.Count = 0; |
| 911 | return ExplicitUnroll; |
| 912 | } |
| 913 | |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 914 | static bool tryToUnrollLoop(Loop *L, DominatorTree &DT, LoopInfo *LI, |
| 915 | ScalarEvolution *SE, const TargetTransformInfo &TTI, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 916 | AssumptionCache &AC, OptimizationRemarkEmitter &ORE, |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 917 | bool PreserveLCSSA, |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 918 | Optional<unsigned> ProvidedCount, |
| 919 | Optional<unsigned> ProvidedThreshold, |
| 920 | Optional<bool> ProvidedAllowPartial, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 921 | Optional<bool> ProvidedRuntime, |
| 922 | Optional<bool> ProvidedUpperBound) { |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 923 | DEBUG(dbgs() << "Loop Unroll: F[" << L->getHeader()->getParent()->getName() |
| 924 | << "] Loop %" << L->getHeader()->getName() << "\n"); |
Haicheng Wu | 731b04c | 2016-11-23 19:39:26 +0000 | [diff] [blame] | 925 | if (HasUnrollDisablePragma(L)) |
| 926 | return false; |
| 927 | if (!L->isLoopSimplifyForm()) { |
| 928 | DEBUG( |
| 929 | dbgs() << " Not unrolling loop which is not in loop-simplify form.\n"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 930 | return false; |
| 931 | } |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 932 | |
| 933 | unsigned NumInlineCandidates; |
| 934 | bool NotDuplicatable; |
| 935 | bool Convergent; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 936 | TargetTransformInfo::UnrollingPreferences UP = gatherUnrollingPreferences( |
| 937 | L, TTI, ProvidedThreshold, ProvidedCount, ProvidedAllowPartial, |
| 938 | ProvidedRuntime, ProvidedUpperBound); |
Haicheng Wu | 731b04c | 2016-11-23 19:39:26 +0000 | [diff] [blame] | 939 | // Exit early if unrolling is disabled. |
| 940 | if (UP.Threshold == 0 && (!UP.Partial || UP.PartialThreshold == 0)) |
| 941 | return false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 942 | unsigned LoopSize = ApproximateLoopSize( |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 943 | L, NumInlineCandidates, NotDuplicatable, Convergent, TTI, &AC, UP.BEInsns); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 944 | DEBUG(dbgs() << " Loop Size = " << LoopSize << "\n"); |
| 945 | if (NotDuplicatable) { |
| 946 | DEBUG(dbgs() << " Not unrolling loop which contains non-duplicatable" |
| 947 | << " instructions.\n"); |
| 948 | return false; |
| 949 | } |
| 950 | if (NumInlineCandidates != 0) { |
| 951 | DEBUG(dbgs() << " Not unrolling loop with inlinable calls.\n"); |
| 952 | return false; |
| 953 | } |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 954 | |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 955 | // Find trip count and trip multiple if count is not available |
| 956 | unsigned TripCount = 0; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 957 | unsigned MaxTripCount = 0; |
Andrew Trick | 1cabe54 | 2011-07-23 00:33:05 +0000 | [diff] [blame] | 958 | unsigned TripMultiple = 1; |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 959 | // If there are multiple exiting blocks but one of them is the latch, use the |
| 960 | // latch for the trip count estimation. Otherwise insist on a single exiting |
| 961 | // block for the trip count estimation. |
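| | // (For instance, a loop with an early-exit "break" plus an exiting latch still |
| | // gets its trip count estimated from the latch, while a multi-exit loop whose |
| | // latch does not exit yields no single exiting block and thus no estimate.) |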
| 962 | BasicBlock *ExitingBlock = L->getLoopLatch(); |
| 963 | if (!ExitingBlock || !L->isLoopExiting(ExitingBlock)) |
| 964 | ExitingBlock = L->getExitingBlock(); |
| 965 | if (ExitingBlock) { |
| 966 | TripCount = SE->getSmallConstantTripCount(L, ExitingBlock); |
| 967 | TripMultiple = SE->getSmallConstantTripMultiple(L, ExitingBlock); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 968 | } |
Hal Finkel | 8f2e700 | 2013-09-11 19:25:43 +0000 | [diff] [blame] | 969 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 970 | // If the loop contains a convergent operation, the prelude we'd add |
| 971 | // to do the first few instructions before we hit the unrolled loop |
| 972 | // is unsafe -- it adds a control-flow dependency to the convergent |
| 973 | // operation.  Therefore, restrict the remainder loop (try unrolling without one). |
| 974 | // |
| 975 | // TODO: This is quite conservative. In practice, convergent_op() |
| 976 | // is likely to be called unconditionally in the loop. In this |
| 977 | // case, the program would be ill-formed (on most architectures) |
| 978 | // unless n were the same on all threads in a thread group. |
| 979 | // Assuming n is the same on all threads, any kind of unrolling is |
| 980 | // safe. But currently llvm's notion of convergence isn't powerful |
| 981 | // enough to express this. |
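| | // Illustrative (hypothetical kernel-style code): |
| | //   for (i = 0; i < n; ++i) { ...; convergent_op(); ... } |
| | // The prologue emitted for runtime unrolling would execute convergent_op() |
| | // under an extra branch that depends on n, which is exactly the control-flow |
| | // dependency described above. |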
| 982 | if (Convergent) |
| 983 | UP.AllowRemainder = false; |
Eli Bendersky | dc6de2c | 2014-06-12 18:05:39 +0000 | [diff] [blame] | 984 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 985 | // Try to find the trip count upper bound if we cannot find the exact trip |
| 986 | // count. |
| 987 | bool MaxOrZero = false; |
| 988 | if (!TripCount) { |
| 989 | MaxTripCount = SE->getSmallConstantMaxTripCount(L); |
| 990 | MaxOrZero = SE->isBackedgeTakenCountMaxOrZero(L); |
| 991 | // We can unroll by the upper bound amount if it's generally allowed or if |
| 992 | // we know that the loop is executed either the upper bound or zero times. |
| 993 | // (MaxOrZero unrolling keeps only the first loop test, so the number of |
| 994 | // loop tests remains the same compared to the non-unrolled version, whereas |
| 995 | // the generic upper bound unrolling keeps all but the last loop test so the |
| 996 | // number of loop tests goes up, which may end up being worse on targets with |
| 997 | // constrained branch predictor resources, so it is controlled by an option.) |
| 998 | // In addition we only unroll small upper bounds. |
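| | // e.g. (illustrative) a search over a buffer of at most 4 elements |
| | // (MaxTripCount = 4) that may exit early on a match can still be fully |
| | // unrolled into 4 copies when upper-bound unrolling is permitted. |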
| 999 | if (!(UP.UpperBound || MaxOrZero) || MaxTripCount > UnrollMaxUpperBound) { |
| 1000 | MaxTripCount = 0; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1001 | } |
| 1002 | } |
| 1003 | |
| 1004 | // computeUnrollCount() decides whether it is beneficial to use the upper bound |
| 1005 | // to fully unroll the loop. |
| 1006 | bool UseUpperBound = false; |
| 1007 | bool IsCountSetExplicitly = |
| 1008 | computeUnrollCount(L, TTI, DT, LI, SE, &ORE, TripCount, MaxTripCount, |
| 1009 | TripMultiple, LoopSize, UP, UseUpperBound); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1010 | if (!UP.Count) |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 1011 | return false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1012 | // Unroll factor (Count) must be less than or equal to TripCount. |
| 1013 | if (TripCount && UP.Count > TripCount) |
| 1014 | UP.Count = TripCount; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 1015 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 1016 | // Unroll the loop. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1017 | if (!UnrollLoop(L, UP.Count, TripCount, UP.Force, UP.Runtime, |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 1018 | UP.AllowExpensiveTripCount, UseUpperBound, MaxOrZero, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1019 | TripMultiple, UP.PeelCount, LI, SE, &DT, &AC, &ORE, |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1020 | PreserveLCSSA)) |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 1021 | return false; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 1022 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1023 | // If the loop was unrolled due to an unroll count pragma or an explicitly |
| 1024 | // set count, mark it as unrolled to prevent unrolling beyond that request. |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1025 | // If the loop was peeled, we already "used up" the profile information |
| 1026 | // we had, so we don't want to unroll or peel again. |
| 1027 | if (IsCountSetExplicitly || UP.PeelCount) |
David L Kreitzer | 8d441eb | 2016-03-25 14:24:52 +0000 | [diff] [blame] | 1028 | SetLoopAlreadyUnrolled(L); |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1029 | |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 1030 | return true; |
| 1031 | } |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1032 | |
| 1033 | namespace { |
| 1034 | class LoopUnroll : public LoopPass { |
| 1035 | public: |
| 1036 | static char ID; // Pass ID, replacement for typeid |
| 1037 | LoopUnroll(Optional<unsigned> Threshold = None, |
| 1038 | Optional<unsigned> Count = None, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1039 | Optional<bool> AllowPartial = None, Optional<bool> Runtime = None, |
| 1040 | Optional<bool> UpperBound = None) |
Benjamin Kramer | 82de7d3 | 2016-05-27 14:27:24 +0000 | [diff] [blame] | 1041 | : LoopPass(ID), ProvidedCount(std::move(Count)), |
| 1042 | ProvidedThreshold(Threshold), ProvidedAllowPartial(AllowPartial), |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1043 | ProvidedRuntime(Runtime), ProvidedUpperBound(UpperBound) { |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1044 | initializeLoopUnrollPass(*PassRegistry::getPassRegistry()); |
| 1045 | } |
| 1046 | |
| 1047 | Optional<unsigned> ProvidedCount; |
| 1048 | Optional<unsigned> ProvidedThreshold; |
| 1049 | Optional<bool> ProvidedAllowPartial; |
| 1050 | Optional<bool> ProvidedRuntime; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1051 | Optional<bool> ProvidedUpperBound; |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1052 | |
| 1053 | bool runOnLoop(Loop *L, LPPassManager &) override { |
Andrew Kaylor | aa641a5 | 2016-04-22 22:06:11 +0000 | [diff] [blame] | 1054 | if (skipLoop(L)) |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1055 | return false; |
| 1056 | |
| 1057 | Function &F = *L->getHeader()->getParent(); |
| 1058 | |
| 1059 | auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree(); |
| 1060 | LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo(); |
| 1061 | ScalarEvolution *SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE(); |
| 1062 | const TargetTransformInfo &TTI = |
| 1063 | getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F); |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1064 | auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F); |
Adam Nemet | 4f155b6 | 2016-08-26 15:58:34 +0000 | [diff] [blame] | 1065 | // For the old PM, we can't use OptimizationRemarkEmitter as an analysis |
| 1066 | // pass. Function analyses need to be preserved across loop transformations |
| 1067 | // but ORE cannot be preserved (see comment before the pass definition). |
| 1068 | OptimizationRemarkEmitter ORE(&F); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1069 | bool PreserveLCSSA = mustPreserveAnalysisID(LCSSAID); |
| 1070 | |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1071 | return tryToUnrollLoop(L, DT, LI, SE, TTI, AC, ORE, PreserveLCSSA, |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1072 | ProvidedCount, ProvidedThreshold, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1073 | ProvidedAllowPartial, ProvidedRuntime, |
| 1074 | ProvidedUpperBound); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1075 | } |
| 1076 | |
| 1077 | /// This transformation requires natural loop information & requires that |
| 1078 | /// loop preheaders be inserted into the CFG... |
| 1079 | /// |
| 1080 | void getAnalysisUsage(AnalysisUsage &AU) const override { |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1081 | AU.addRequired<AssumptionCacheTracker>(); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1082 | AU.addRequired<TargetTransformInfoWrapperPass>(); |
Chandler Carruth | 31088a9 | 2016-02-19 10:45:18 +0000 | [diff] [blame] | 1083 | // FIXME: Loop passes are required to preserve domtree, and for now we just |
| 1084 | // recreate dom info if anything gets unrolled. |
| 1085 | getLoopAnalysisUsage(AU); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1086 | } |
| 1087 | }; |
| 1088 | } |
| 1089 | |
| 1090 | char LoopUnroll::ID = 0; |
| 1091 | INITIALIZE_PASS_BEGIN(LoopUnroll, "loop-unroll", "Unroll loops", false, false) |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1092 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) |
Chandler Carruth | 31088a9 | 2016-02-19 10:45:18 +0000 | [diff] [blame] | 1093 | INITIALIZE_PASS_DEPENDENCY(LoopPass) |
| 1094 | INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass) |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1095 | INITIALIZE_PASS_END(LoopUnroll, "loop-unroll", "Unroll loops", false, false) |
| 1096 | |
| 1097 | Pass *llvm::createLoopUnrollPass(int Threshold, int Count, int AllowPartial, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1098 | int Runtime, int UpperBound) { |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1099 | // TODO: It would make more sense for this function to take the optionals |
| 1100 | // directly, but that's dangerous since it would silently break out of tree |
| 1101 | // callers. |
| 1102 | return new LoopUnroll(Threshold == -1 ? None : Optional<unsigned>(Threshold), |
| 1103 | Count == -1 ? None : Optional<unsigned>(Count), |
| 1104 | AllowPartial == -1 ? None |
| 1105 | : Optional<bool>(AllowPartial), |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1106 | Runtime == -1 ? None : Optional<bool>(Runtime), |
| 1107 | UpperBound == -1 ? None : Optional<bool>(UpperBound)); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1108 | } |
| 1109 | |
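| | // Note (derived from createLoopUnrollPass above): passing -1 means "no |
| | // explicit value" (use the default threshold and count), while the trailing |
| | // zeros disable partial, runtime and upper-bound unrolling for the simple |
| | // unroller below. |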
| 1110 | Pass *llvm::createSimpleLoopUnrollPass() { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1111 | return llvm::createLoopUnrollPass(-1, -1, 0, 0, 0); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1112 | } |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1113 | |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame^] | 1114 | PreservedAnalyses LoopUnrollPass::run(Loop &L, LoopAnalysisManager &AM, |
| 1115 | LoopStandardAnalysisResults &AR, |
| 1116 | LPMUpdater &) { |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1117 | const auto &FAM = |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame^] | 1118 | AM.getResult<FunctionAnalysisManagerLoopProxy>(L, AR).getManager(); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1119 | Function *F = L.getHeader()->getParent(); |
| 1120 | |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1121 | auto *ORE = FAM.getCachedResult<OptimizationRemarkEmitterAnalysis>(*F); |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame^] | 1122 | // FIXME: This should probably be optional rather than required. |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1123 | if (!ORE) |
| 1124 | report_fatal_error("LoopUnrollPass: OptimizationRemarkEmitterAnalysis not " |
| 1125 | "cached at a higher level"); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1126 | |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame^] | 1127 | bool Changed = tryToUnrollLoop(&L, AR.DT, &AR.LI, &AR.SE, AR.TTI, AR.AC, *ORE, |
| 1128 | /*PreserveLCSSA*/ true, ProvidedCount, |
| 1129 | ProvidedThreshold, ProvidedAllowPartial, |
| 1130 | ProvidedRuntime, ProvidedUpperBound); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1131 | |
| 1132 | if (!Changed) |
| 1133 | return PreservedAnalyses::all(); |
| 1134 | return getLoopPassPreservedAnalyses(); |
| 1135 | } |