Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 1 | //===-- LoopUnroll.cpp - Loop unroller pass -------------------------------===// |
Misha Brukman | b1c9317 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 2 | // |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
Chris Lattner | f3ebc3f | 2007-12-29 20:36:04 +0000 | [diff] [blame] | 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
Misha Brukman | b1c9317 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 7 | // |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This pass implements a simple loop unroller. It works best when loops have |
| 11 | // been canonicalized by the -indvars pass, allowing it to determine the trip |
| 12 | // counts of loops easily. |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 13 | //===----------------------------------------------------------------------===// |
| 14 | |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 15 | #include "llvm/Transforms/Scalar/LoopUnrollPass.h" |
Chandler Carruth | 3b057b3 | 2015-02-13 03:57:40 +0000 | [diff] [blame] | 16 | #include "llvm/ADT/SetVector.h" |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 17 | #include "llvm/Analysis/AssumptionCache.h" |
Chris Lattner | 679572e | 2011-01-02 07:35:53 +0000 | [diff] [blame] | 18 | #include "llvm/Analysis/CodeMetrics.h" |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 19 | #include "llvm/Analysis/GlobalsModRef.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 20 | #include "llvm/Analysis/InstructionSimplify.h" |
Chandler Carruth | ed0881b | 2012-12-03 16:50:05 +0000 | [diff] [blame] | 21 | #include "llvm/Analysis/LoopPass.h" |
Michael Zolotukhin | 1da4afd | 2016-02-08 23:03:59 +0000 | [diff] [blame] | 22 | #include "llvm/Analysis/LoopUnrollAnalyzer.h" |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 23 | #include "llvm/Analysis/OptimizationDiagnosticInfo.h" |
Dan Gohman | 0141c13 | 2010-07-26 18:11:16 +0000 | [diff] [blame] | 24 | #include "llvm/Analysis/ScalarEvolution.h" |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 25 | #include "llvm/Analysis/ScalarEvolutionExpressions.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 26 | #include "llvm/IR/DataLayout.h" |
Chandler Carruth | 5ad5f15 | 2014-01-13 09:26:24 +0000 | [diff] [blame] | 27 | #include "llvm/IR/Dominators.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 28 | #include "llvm/IR/InstVisitor.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 29 | #include "llvm/IR/IntrinsicInst.h" |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 30 | #include "llvm/IR/Metadata.h" |
Reid Spencer | 7c16caa | 2004-09-01 22:55:40 +0000 | [diff] [blame] | 31 | #include "llvm/Support/CommandLine.h" |
| 32 | #include "llvm/Support/Debug.h" |
Daniel Dunbar | 0dd5e1e | 2009-07-25 00:23:56 +0000 | [diff] [blame] | 33 | #include "llvm/Support/raw_ostream.h" |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 34 | #include "llvm/Transforms/Scalar.h" |
Chandler Carruth | 3bab7e1 | 2017-01-11 09:43:56 +0000 | [diff] [blame] | 35 | #include "llvm/Transforms/Scalar/LoopPassManager.h" |
Chandler Carruth | 31088a9 | 2016-02-19 10:45:18 +0000 | [diff] [blame] | 36 | #include "llvm/Transforms/Utils/LoopUtils.h" |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 37 | #include "llvm/Transforms/Utils/UnrollLoop.h" |
Duncan Sands | 67933e6 | 2008-05-16 09:30:00 +0000 | [diff] [blame] | 38 | #include <climits> |
Benjamin Kramer | 82de7d3 | 2016-05-27 14:27:24 +0000 | [diff] [blame] | 39 | #include <utility> |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 40 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 41 | using namespace llvm; |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 42 | |
Chandler Carruth | 964daaa | 2014-04-22 02:55:47 +0000 | [diff] [blame] | 43 | #define DEBUG_TYPE "loop-unroll" |
| 44 | |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 45 | static cl::opt<unsigned> |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 46 | UnrollThreshold("unroll-threshold", cl::Hidden, |
Dehao Chen | c3f87f0 | 2017-01-17 23:39:33 +0000 | [diff] [blame] | 47 | cl::desc("The cost threshold for loop unrolling")); |
| 48 | |
| 49 | static cl::opt<unsigned> UnrollPartialThreshold( |
| 50 | "unroll-partial-threshold", cl::Hidden, |
| 51 | cl::desc("The cost threshold for partial loop unrolling")); |
Chandler Carruth | 9dabd14 | 2015-06-05 17:01:43 +0000 | [diff] [blame] | 52 | |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 53 | static cl::opt<unsigned> UnrollMaxPercentThresholdBoost( |
| 54 | "unroll-max-percent-threshold-boost", cl::init(400), cl::Hidden, |
| 55 | cl::desc("The maximum 'boost' (represented as a percentage >= 100) applied " |
| 56 | "to the threshold when aggressively unrolling a loop due to the " |
| 57 | "dynamic cost savings. If completely unrolling a loop will reduce " |
| 58 | "the total runtime from X to Y, we boost the loop unroll " |
| 59 | "threshold to DefaultThreshold*std::min(MaxPercentThresholdBoost, " |
| 60 | "X/Y). This limit avoids excessive code bloat.")); |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 61 | |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 62 | static cl::opt<unsigned> UnrollMaxIterationsCountToAnalyze( |
Michael Zolotukhin | 8f7a242 | 2016-05-24 23:00:05 +0000 | [diff] [blame] | 63 | "unroll-max-iteration-count-to-analyze", cl::init(10), cl::Hidden, |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 64 | cl::desc("Don't allow loop unrolling to simulate more than this number of" |
| 65 | "iterations when checking full unroll profitability")); |
| 66 | |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 67 | static cl::opt<unsigned> UnrollCount( |
| 68 | "unroll-count", cl::Hidden, |
| 69 | cl::desc("Use this unroll count for all loops including those with " |
| 70 | "unroll_count pragma values, for testing purposes")); |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 71 | |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 72 | static cl::opt<unsigned> UnrollMaxCount( |
| 73 | "unroll-max-count", cl::Hidden, |
| 74 | cl::desc("Set the max unroll count for partial and runtime unrolling, for" |
| 75 | "testing purposes")); |
Fiona Glaser | 045afc4 | 2016-04-06 16:57:25 +0000 | [diff] [blame] | 76 | |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 77 | static cl::opt<unsigned> UnrollFullMaxCount( |
| 78 | "unroll-full-max-count", cl::Hidden, |
| 79 | cl::desc( |
| 80 | "Set the max unroll count for full unrolling, for testing purposes")); |
Fiona Glaser | 045afc4 | 2016-04-06 16:57:25 +0000 | [diff] [blame] | 81 | |
Matthijs Kooijman | 98b5c16 | 2008-07-29 13:21:23 +0000 | [diff] [blame] | 82 | static cl::opt<bool> |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 83 | UnrollAllowPartial("unroll-allow-partial", cl::Hidden, |
| 84 | cl::desc("Allows loops to be partially unrolled until " |
| 85 | "-unroll-threshold loop size is reached.")); |
Matthijs Kooijman | 98b5c16 | 2008-07-29 13:21:23 +0000 | [diff] [blame] | 86 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 87 | static cl::opt<bool> UnrollAllowRemainder( |
| 88 | "unroll-allow-remainder", cl::Hidden, |
| 89 | cl::desc("Allow generation of a loop remainder (extra iterations) " |
| 90 | "when unrolling a loop.")); |
| 91 | |
Andrew Trick | d04d1529 | 2011-12-09 06:19:40 +0000 | [diff] [blame] | 92 | static cl::opt<bool> |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 93 | UnrollRuntime("unroll-runtime", cl::ZeroOrMore, cl::Hidden, |
| 94 | cl::desc("Unroll loops with run-time trip counts")); |
Andrew Trick | d04d1529 | 2011-12-09 06:19:40 +0000 | [diff] [blame] | 95 | |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 96 | static cl::opt<unsigned> UnrollMaxUpperBound( |
| 97 | "unroll-max-upperbound", cl::init(8), cl::Hidden, |
| 98 | cl::desc( |
| 99 | "The max of trip count upper bound that is considered in unrolling")); |
| 100 | |
Dehao Chen | d55bc4c | 2016-05-05 00:54:54 +0000 | [diff] [blame] | 101 | static cl::opt<unsigned> PragmaUnrollThreshold( |
| 102 | "pragma-unroll-threshold", cl::init(16 * 1024), cl::Hidden, |
| 103 | cl::desc("Unrolled size limit for loops with an unroll(full) or " |
| 104 | "unroll_count pragma.")); |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 105 | |
Dehao Chen | 41d72a8 | 2016-11-17 01:17:02 +0000 | [diff] [blame] | 106 | static cl::opt<unsigned> FlatLoopTripCountThreshold( |
| 107 | "flat-loop-tripcount-threshold", cl::init(5), cl::Hidden, |
| 108 | cl::desc("If the runtime tripcount for the loop is lower than the " |
| 109 | "threshold, the loop is considered as flat and will be less " |
| 110 | "aggressively unrolled.")); |
| 111 | |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 112 | static cl::opt<bool> |
Michael Kuperstein | c2af82b | 2017-02-22 00:27:34 +0000 | [diff] [blame] | 113 | UnrollAllowPeeling("unroll-allow-peeling", cl::init(true), cl::Hidden, |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 114 | cl::desc("Allows loops to be peeled when the dynamic " |
| 115 | "trip count is known to be low.")); |
| 116 | |
Chandler Carruth | ce40fa1 | 2017-01-25 02:49:01 +0000 | [diff] [blame] | 117 | // This option isn't ever intended to be enabled, it serves to allow |
| 118 | // experiments to check the assumptions about when this kind of revisit is |
| 119 | // necessary. |
| 120 | static cl::opt<bool> UnrollRevisitChildLoops( |
| 121 | "unroll-revisit-child-loops", cl::Hidden, |
| 122 | cl::desc("Enqueue and re-visit child loops in the loop PM after unrolling. " |
| 123 | "This shouldn't typically be needed as child loops (or their " |
| 124 | "clones) were already visited.")); |
| 125 | |
/// A magic value for use with the Threshold parameter to indicate
/// that the loop unroll should be performed regardless of how much
/// code expansion would result. (constexpr guarantees a compile-time
/// constant; the file already relies on C++11 features.)
static constexpr unsigned NoThreshold = UINT_MAX;
| 130 | |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 131 | /// Gather the various unrolling parameters based on the defaults, compiler |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 132 | /// flags, TTI overrides and user specified parameters. |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 133 | static TargetTransformInfo::UnrollingPreferences gatherUnrollingPreferences( |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 134 | Loop *L, const TargetTransformInfo &TTI, int OptLevel, |
| 135 | Optional<unsigned> UserThreshold, Optional<unsigned> UserCount, |
| 136 | Optional<bool> UserAllowPartial, Optional<bool> UserRuntime, |
| 137 | Optional<bool> UserUpperBound) { |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 138 | TargetTransformInfo::UnrollingPreferences UP; |
| 139 | |
| 140 | // Set up the defaults |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 141 | UP.Threshold = OptLevel > 2 ? 300 : 150; |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 142 | UP.MaxPercentThresholdBoost = 400; |
Hans Wennborg | 719b26b | 2016-05-10 21:45:55 +0000 | [diff] [blame] | 143 | UP.OptSizeThreshold = 0; |
Dehao Chen | c3f87f0 | 2017-01-17 23:39:33 +0000 | [diff] [blame] | 144 | UP.PartialThreshold = 150; |
Hans Wennborg | 719b26b | 2016-05-10 21:45:55 +0000 | [diff] [blame] | 145 | UP.PartialOptSizeThreshold = 0; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 146 | UP.Count = 0; |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 147 | UP.PeelCount = 0; |
Jonas Paulsson | 58c5a7f | 2016-09-28 09:41:38 +0000 | [diff] [blame] | 148 | UP.DefaultUnrollRuntimeCount = 8; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 149 | UP.MaxCount = UINT_MAX; |
Fiona Glaser | 045afc4 | 2016-04-06 16:57:25 +0000 | [diff] [blame] | 150 | UP.FullUnrollMaxCount = UINT_MAX; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 151 | UP.BEInsns = 2; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 152 | UP.Partial = false; |
| 153 | UP.Runtime = false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 154 | UP.AllowRemainder = true; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 155 | UP.AllowExpensiveTripCount = false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 156 | UP.Force = false; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 157 | UP.UpperBound = false; |
Michael Kuperstein | c2af82b | 2017-02-22 00:27:34 +0000 | [diff] [blame] | 158 | UP.AllowPeeling = true; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 159 | |
| 160 | // Override with any target specific settings |
| 161 | TTI.getUnrollingPreferences(L, UP); |
| 162 | |
| 163 | // Apply size attributes |
| 164 | if (L->getHeader()->getParent()->optForSize()) { |
| 165 | UP.Threshold = UP.OptSizeThreshold; |
| 166 | UP.PartialThreshold = UP.PartialOptSizeThreshold; |
| 167 | } |
| 168 | |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 169 | // Apply any user values specified by cl::opt |
Dehao Chen | c3f87f0 | 2017-01-17 23:39:33 +0000 | [diff] [blame] | 170 | if (UnrollThreshold.getNumOccurrences() > 0) |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 171 | UP.Threshold = UnrollThreshold; |
Dehao Chen | c3f87f0 | 2017-01-17 23:39:33 +0000 | [diff] [blame] | 172 | if (UnrollPartialThreshold.getNumOccurrences() > 0) |
| 173 | UP.PartialThreshold = UnrollPartialThreshold; |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 174 | if (UnrollMaxPercentThresholdBoost.getNumOccurrences() > 0) |
| 175 | UP.MaxPercentThresholdBoost = UnrollMaxPercentThresholdBoost; |
Fiona Glaser | 045afc4 | 2016-04-06 16:57:25 +0000 | [diff] [blame] | 176 | if (UnrollMaxCount.getNumOccurrences() > 0) |
| 177 | UP.MaxCount = UnrollMaxCount; |
| 178 | if (UnrollFullMaxCount.getNumOccurrences() > 0) |
| 179 | UP.FullUnrollMaxCount = UnrollFullMaxCount; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 180 | if (UnrollAllowPartial.getNumOccurrences() > 0) |
| 181 | UP.Partial = UnrollAllowPartial; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 182 | if (UnrollAllowRemainder.getNumOccurrences() > 0) |
| 183 | UP.AllowRemainder = UnrollAllowRemainder; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 184 | if (UnrollRuntime.getNumOccurrences() > 0) |
| 185 | UP.Runtime = UnrollRuntime; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 186 | if (UnrollMaxUpperBound == 0) |
| 187 | UP.UpperBound = false; |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 188 | if (UnrollAllowPeeling.getNumOccurrences() > 0) |
| 189 | UP.AllowPeeling = UnrollAllowPeeling; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 190 | |
| 191 | // Apply user values provided by argument |
| 192 | if (UserThreshold.hasValue()) { |
| 193 | UP.Threshold = *UserThreshold; |
| 194 | UP.PartialThreshold = *UserThreshold; |
| 195 | } |
| 196 | if (UserCount.hasValue()) |
| 197 | UP.Count = *UserCount; |
| 198 | if (UserAllowPartial.hasValue()) |
| 199 | UP.Partial = *UserAllowPartial; |
| 200 | if (UserRuntime.hasValue()) |
| 201 | UP.Runtime = *UserRuntime; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 202 | if (UserUpperBound.hasValue()) |
| 203 | UP.UpperBound = *UserUpperBound; |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 204 | |
Justin Bogner | a1dd493 | 2016-01-12 00:55:26 +0000 | [diff] [blame] | 205 | return UP; |
| 206 | } |
| 207 | |
Chris Lattner | 79a42ac | 2006-12-19 21:40:18 +0000 | [diff] [blame] | 208 | namespace { |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 209 | /// A struct to densely store the state of an instruction after unrolling at |
| 210 | /// each iteration. |
| 211 | /// |
| 212 | /// This is designed to work like a tuple of <Instruction *, int> for the |
| 213 | /// purposes of hashing and lookup, but to be able to associate two boolean |
| 214 | /// states with each key. |
| 215 | struct UnrolledInstState { |
| 216 | Instruction *I; |
| 217 | int Iteration : 30; |
| 218 | unsigned IsFree : 1; |
| 219 | unsigned IsCounted : 1; |
| 220 | }; |
| 221 | |
| 222 | /// Hashing and equality testing for a set of the instruction states. |
| 223 | struct UnrolledInstStateKeyInfo { |
| 224 | typedef DenseMapInfo<Instruction *> PtrInfo; |
| 225 | typedef DenseMapInfo<std::pair<Instruction *, int>> PairInfo; |
| 226 | static inline UnrolledInstState getEmptyKey() { |
| 227 | return {PtrInfo::getEmptyKey(), 0, 0, 0}; |
| 228 | } |
| 229 | static inline UnrolledInstState getTombstoneKey() { |
| 230 | return {PtrInfo::getTombstoneKey(), 0, 0, 0}; |
| 231 | } |
| 232 | static inline unsigned getHashValue(const UnrolledInstState &S) { |
| 233 | return PairInfo::getHashValue({S.I, S.Iteration}); |
| 234 | } |
| 235 | static inline bool isEqual(const UnrolledInstState &LHS, |
| 236 | const UnrolledInstState &RHS) { |
| 237 | return PairInfo::isEqual({LHS.I, LHS.Iteration}, {RHS.I, RHS.Iteration}); |
| 238 | } |
| 239 | }; |
| 240 | } |
| 241 | |
namespace {
/// Result pair produced by the full-unroll cost analysis.
struct EstimatedUnrollCost {
  /// Estimated size/cost of the loop after complete unrolling, with
  /// simplifications applied.
  unsigned UnrolledCost;

  /// Estimated dynamic (actually-executed) cost of the loop in its original,
  /// rolled form.
  unsigned RolledDynamicCost;
};
}
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 252 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 253 | /// \brief Figure out if the loop is worth full unrolling. |
| 254 | /// |
| 255 | /// Complete loop unrolling can make some loads constant, and we need to know |
| 256 | /// if that would expose any further optimization opportunities. This routine |
Michael Zolotukhin | c4e4f33 | 2015-06-11 22:17:39 +0000 | [diff] [blame] | 257 | /// estimates this optimization. It computes cost of unrolled loop |
| 258 | /// (UnrolledCost) and dynamic cost of the original loop (RolledDynamicCost). By |
| 259 | /// dynamic cost we mean that we won't count costs of blocks that are known not |
| 260 | /// to be executed (i.e. if we have a branch in the loop and we know that at the |
| 261 | /// given iteration its condition would be resolved to true, we won't add up the |
| 262 | /// cost of the 'false'-block). |
| 263 | /// \returns Optional value, holding the RolledDynamicCost and UnrolledCost. If |
| 264 | /// the analysis failed (no benefits expected from the unrolling, or the loop is |
| 265 | /// too big to analyze), the returned value is None. |
Benjamin Kramer | fcdb1c1 | 2015-08-20 09:57:22 +0000 | [diff] [blame] | 266 | static Optional<EstimatedUnrollCost> |
Chandler Carruth | 87adb7a | 2015-08-03 20:32:27 +0000 | [diff] [blame] | 267 | analyzeLoopUnrollCost(const Loop *L, unsigned TripCount, DominatorTree &DT, |
| 268 | ScalarEvolution &SE, const TargetTransformInfo &TTI, |
Dehao Chen | c3be225 | 2016-12-02 03:17:07 +0000 | [diff] [blame] | 269 | unsigned MaxUnrolledLoopSize) { |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 270 | // We want to be able to scale offsets by the trip count and add more offsets |
| 271 | // to them without checking for overflows, and we already don't want to |
| 272 | // analyze *massive* trip counts, so we force the max to be reasonably small. |
| 273 | assert(UnrollMaxIterationsCountToAnalyze < (INT_MAX / 2) && |
| 274 | "The unroll iterations max is too large!"); |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 275 | |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 276 | // Only analyze inner loops. We can't properly estimate cost of nested loops |
| 277 | // and we won't visit inner loops again anyway. |
| 278 | if (!L->empty()) |
| 279 | return None; |
| 280 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 281 | // Don't simulate loops with a big or unknown tripcount |
| 282 | if (!UnrollMaxIterationsCountToAnalyze || !TripCount || |
| 283 | TripCount > UnrollMaxIterationsCountToAnalyze) |
| 284 | return None; |
Chandler Carruth | a6ae877 | 2015-05-12 23:32:56 +0000 | [diff] [blame] | 285 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 286 | SmallSetVector<BasicBlock *, 16> BBWorklist; |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 287 | SmallSetVector<std::pair<BasicBlock *, BasicBlock *>, 4> ExitWorklist; |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 288 | DenseMap<Value *, Constant *> SimplifiedValues; |
Chandler Carruth | 87adb7a | 2015-08-03 20:32:27 +0000 | [diff] [blame] | 289 | SmallVector<std::pair<Value *, Constant *>, 4> SimplifiedInputValues; |
Chandler Carruth | 3b057b3 | 2015-02-13 03:57:40 +0000 | [diff] [blame] | 290 | |
Chandler Carruth | 9dabd14 | 2015-06-05 17:01:43 +0000 | [diff] [blame] | 291 | // The estimated cost of the unrolled form of the loop. We try to estimate |
| 292 | // this by simplifying as much as we can while computing the estimate. |
Dehao Chen | c3be225 | 2016-12-02 03:17:07 +0000 | [diff] [blame] | 293 | unsigned UnrolledCost = 0; |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 294 | |
Chandler Carruth | 9dabd14 | 2015-06-05 17:01:43 +0000 | [diff] [blame] | 295 | // We also track the estimated dynamic (that is, actually executed) cost in |
| 296 | // the rolled form. This helps identify cases when the savings from unrolling |
| 297 | // aren't just exposing dead control flows, but actual reduced dynamic |
| 298 | // instructions due to the simplifications which we expect to occur after |
| 299 | // unrolling. |
Dehao Chen | c3be225 | 2016-12-02 03:17:07 +0000 | [diff] [blame] | 300 | unsigned RolledDynamicCost = 0; |
Chandler Carruth | 8c86375 | 2015-02-13 03:48:38 +0000 | [diff] [blame] | 301 | |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 302 | // We track the simplification of each instruction in each iteration. We use |
| 303 | // this to recursively merge costs into the unrolled cost on-demand so that |
| 304 | // we don't count the cost of any dead code. This is essentially a map from |
| 305 | // <instruction, int> to <bool, bool>, but stored as a densely packed struct. |
| 306 | DenseSet<UnrolledInstState, UnrolledInstStateKeyInfo> InstCostMap; |
| 307 | |
| 308 | // A small worklist used to accumulate cost of instructions from each |
| 309 | // observable and reached root in the loop. |
| 310 | SmallVector<Instruction *, 16> CostWorklist; |
| 311 | |
| 312 | // PHI-used worklist used between iterations while accumulating cost. |
| 313 | SmallVector<Instruction *, 4> PHIUsedList; |
| 314 | |
| 315 | // Helper function to accumulate cost for instructions in the loop. |
| 316 | auto AddCostRecursively = [&](Instruction &RootI, int Iteration) { |
| 317 | assert(Iteration >= 0 && "Cannot have a negative iteration!"); |
| 318 | assert(CostWorklist.empty() && "Must start with an empty cost list"); |
| 319 | assert(PHIUsedList.empty() && "Must start with an empty phi used list"); |
| 320 | CostWorklist.push_back(&RootI); |
| 321 | for (;; --Iteration) { |
| 322 | do { |
| 323 | Instruction *I = CostWorklist.pop_back_val(); |
| 324 | |
| 325 | // InstCostMap only uses I and Iteration as a key, the other two values |
| 326 | // don't matter here. |
| 327 | auto CostIter = InstCostMap.find({I, Iteration, 0, 0}); |
| 328 | if (CostIter == InstCostMap.end()) |
| 329 | // If an input to a PHI node comes from a dead path through the loop |
| 330 | // we may have no cost data for it here. What that actually means is |
| 331 | // that it is free. |
| 332 | continue; |
| 333 | auto &Cost = *CostIter; |
| 334 | if (Cost.IsCounted) |
| 335 | // Already counted this instruction. |
| 336 | continue; |
| 337 | |
| 338 | // Mark that we are counting the cost of this instruction now. |
| 339 | Cost.IsCounted = true; |
| 340 | |
| 341 | // If this is a PHI node in the loop header, just add it to the PHI set. |
| 342 | if (auto *PhiI = dyn_cast<PHINode>(I)) |
| 343 | if (PhiI->getParent() == L->getHeader()) { |
| 344 | assert(Cost.IsFree && "Loop PHIs shouldn't be evaluated as they " |
| 345 | "inherently simplify during unrolling."); |
| 346 | if (Iteration == 0) |
| 347 | continue; |
| 348 | |
| 349 | // Push the incoming value from the backedge into the PHI used list |
| 350 | // if it is an in-loop instruction. We'll use this to populate the |
| 351 | // cost worklist for the next iteration (as we count backwards). |
| 352 | if (auto *OpI = dyn_cast<Instruction>( |
| 353 | PhiI->getIncomingValueForBlock(L->getLoopLatch()))) |
| 354 | if (L->contains(OpI)) |
| 355 | PHIUsedList.push_back(OpI); |
| 356 | continue; |
| 357 | } |
| 358 | |
| 359 | // First accumulate the cost of this instruction. |
| 360 | if (!Cost.IsFree) { |
| 361 | UnrolledCost += TTI.getUserCost(I); |
| 362 | DEBUG(dbgs() << "Adding cost of instruction (iteration " << Iteration |
| 363 | << "): "); |
| 364 | DEBUG(I->dump()); |
| 365 | } |
| 366 | |
| 367 | // We must count the cost of every operand which is not free, |
| 368 | // recursively. If we reach a loop PHI node, simply add it to the set |
| 369 | // to be considered on the next iteration (backwards!). |
| 370 | for (Value *Op : I->operands()) { |
| 371 | // Check whether this operand is free due to being a constant or |
| 372 | // outside the loop. |
| 373 | auto *OpI = dyn_cast<Instruction>(Op); |
| 374 | if (!OpI || !L->contains(OpI)) |
| 375 | continue; |
| 376 | |
| 377 | // Otherwise accumulate its cost. |
| 378 | CostWorklist.push_back(OpI); |
| 379 | } |
| 380 | } while (!CostWorklist.empty()); |
| 381 | |
| 382 | if (PHIUsedList.empty()) |
| 383 | // We've exhausted the search. |
| 384 | break; |
| 385 | |
| 386 | assert(Iteration > 0 && |
| 387 | "Cannot track PHI-used values past the first iteration!"); |
| 388 | CostWorklist.append(PHIUsedList.begin(), PHIUsedList.end()); |
| 389 | PHIUsedList.clear(); |
| 390 | } |
| 391 | }; |
| 392 | |
Chandler Carruth | 87adb7a | 2015-08-03 20:32:27 +0000 | [diff] [blame] | 393 | // Ensure that we don't violate the loop structure invariants relied on by |
| 394 | // this analysis. |
| 395 | assert(L->isLoopSimplifyForm() && "Must put loop into normal form first."); |
| 396 | assert(L->isLCSSAForm(DT) && |
| 397 | "Must have loops in LCSSA form to track live-out values."); |
| 398 | |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 399 | DEBUG(dbgs() << "Starting LoopUnroll profitability analysis...\n"); |
| 400 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 401 | // Simulate execution of each iteration of the loop counting instructions, |
| 402 | // which would be simplified. |
| 403 | // Since the same load will take different values on different iterations, |
| 404 | // we literally have to go through all loop's iterations. |
| 405 | for (unsigned Iteration = 0; Iteration < TripCount; ++Iteration) { |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 406 | DEBUG(dbgs() << " Analyzing iteration " << Iteration << "\n"); |
Chandler Carruth | 87adb7a | 2015-08-03 20:32:27 +0000 | [diff] [blame] | 407 | |
| 408 | // Prepare for the iteration by collecting any simplified entry or backedge |
| 409 | // inputs. |
| 410 | for (Instruction &I : *L->getHeader()) { |
| 411 | auto *PHI = dyn_cast<PHINode>(&I); |
| 412 | if (!PHI) |
| 413 | break; |
| 414 | |
| 415 | // The loop header PHI nodes must have exactly two input: one from the |
| 416 | // loop preheader and one from the loop latch. |
| 417 | assert( |
| 418 | PHI->getNumIncomingValues() == 2 && |
| 419 | "Must have an incoming value only for the preheader and the latch."); |
| 420 | |
| 421 | Value *V = PHI->getIncomingValueForBlock( |
| 422 | Iteration == 0 ? L->getLoopPreheader() : L->getLoopLatch()); |
| 423 | Constant *C = dyn_cast<Constant>(V); |
| 424 | if (Iteration != 0 && !C) |
| 425 | C = SimplifiedValues.lookup(V); |
| 426 | if (C) |
| 427 | SimplifiedInputValues.push_back({PHI, C}); |
| 428 | } |
| 429 | |
| 430 | // Now clear and re-populate the map for the next iteration. |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 431 | SimplifiedValues.clear(); |
Chandler Carruth | 87adb7a | 2015-08-03 20:32:27 +0000 | [diff] [blame] | 432 | while (!SimplifiedInputValues.empty()) |
| 433 | SimplifiedValues.insert(SimplifiedInputValues.pop_back_val()); |
| 434 | |
Michael Zolotukhin | 9f520eb | 2016-02-26 02:57:05 +0000 | [diff] [blame] | 435 | UnrolledInstAnalyzer Analyzer(Iteration, SimplifiedValues, SE, L); |
Chandler Carruth | f174a15 | 2015-05-22 02:47:29 +0000 | [diff] [blame] | 436 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 437 | BBWorklist.clear(); |
| 438 | BBWorklist.insert(L->getHeader()); |
| 439 | // Note that we *must not* cache the size, this loop grows the worklist. |
| 440 | for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) { |
| 441 | BasicBlock *BB = BBWorklist[Idx]; |
Chandler Carruth | f174a15 | 2015-05-22 02:47:29 +0000 | [diff] [blame] | 442 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 443 | // Visit all instructions in the given basic block and try to simplify |
| 444 | // it. We don't change the actual IR, just count optimization |
| 445 | // opportunities. |
| 446 | for (Instruction &I : *BB) { |
Dehao Chen | 977853b | 2016-09-30 18:30:04 +0000 | [diff] [blame] | 447 | if (isa<DbgInfoIntrinsic>(I)) |
| 448 | continue; |
| 449 | |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 450 | // Track this instruction's expected baseline cost when executing the |
| 451 | // rolled loop form. |
| 452 | RolledDynamicCost += TTI.getUserCost(&I); |
Chandler Carruth | 17a0496 | 2015-02-13 03:49:41 +0000 | [diff] [blame] | 453 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 454 | // Visit the instruction to analyze its loop cost after unrolling, |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 455 | // and if the visitor returns true, mark the instruction as free after |
| 456 | // unrolling and continue. |
| 457 | bool IsFree = Analyzer.visit(I); |
| 458 | bool Inserted = InstCostMap.insert({&I, (int)Iteration, |
| 459 | (unsigned)IsFree, |
| 460 | /*IsCounted*/ false}).second; |
| 461 | (void)Inserted; |
| 462 | assert(Inserted && "Cannot have a state for an unvisited instruction!"); |
Chandler Carruth | 9dabd14 | 2015-06-05 17:01:43 +0000 | [diff] [blame] | 463 | |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 464 | if (IsFree) |
| 465 | continue; |
| 466 | |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 467 | // Can't properly model a cost of a call. |
| 468 | // FIXME: With a proper cost model we should be able to do it. |
| 469 | if(isa<CallInst>(&I)) |
| 470 | return None; |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 471 | |
Haicheng Wu | e787763 | 2016-08-17 22:42:58 +0000 | [diff] [blame] | 472 | // If the instruction might have a side-effect recursively account for |
| 473 | // the cost of it and all the instructions leading up to it. |
| 474 | if (I.mayHaveSideEffects()) |
| 475 | AddCostRecursively(I, Iteration); |
| 476 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 477 | // If unrolled body turns out to be too big, bail out. |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 478 | if (UnrolledCost > MaxUnrolledLoopSize) { |
| 479 | DEBUG(dbgs() << " Exceeded threshold.. exiting.\n" |
| 480 | << " UnrolledCost: " << UnrolledCost |
| 481 | << ", MaxUnrolledLoopSize: " << MaxUnrolledLoopSize |
| 482 | << "\n"); |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 483 | return None; |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 484 | } |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 485 | } |
Chandler Carruth | 415f412 | 2015-02-13 02:17:39 +0000 | [diff] [blame] | 486 | |
Michael Zolotukhin | 57776b8 | 2015-07-24 01:53:04 +0000 | [diff] [blame] | 487 | TerminatorInst *TI = BB->getTerminator(); |
| 488 | |
| 489 | // Add in the live successors by first checking whether we have terminator |
| 490 | // that may be simplified based on the values simplified by this call. |
Michael Zolotukhin | 1ecdeda | 2016-05-26 21:42:51 +0000 | [diff] [blame] | 491 | BasicBlock *KnownSucc = nullptr; |
Michael Zolotukhin | 57776b8 | 2015-07-24 01:53:04 +0000 | [diff] [blame] | 492 | if (BranchInst *BI = dyn_cast<BranchInst>(TI)) { |
| 493 | if (BI->isConditional()) { |
| 494 | if (Constant *SimpleCond = |
| 495 | SimplifiedValues.lookup(BI->getCondition())) { |
Michael Zolotukhin | 3a7d55b | 2015-07-29 18:10:29 +0000 | [diff] [blame] | 496 | // Just take the first successor if condition is undef |
| 497 | if (isa<UndefValue>(SimpleCond)) |
Michael Zolotukhin | 1ecdeda | 2016-05-26 21:42:51 +0000 | [diff] [blame] | 498 | KnownSucc = BI->getSuccessor(0); |
| 499 | else if (ConstantInt *SimpleCondVal = |
| 500 | dyn_cast<ConstantInt>(SimpleCond)) |
| 501 | KnownSucc = BI->getSuccessor(SimpleCondVal->isZero() ? 1 : 0); |
Michael Zolotukhin | 57776b8 | 2015-07-24 01:53:04 +0000 | [diff] [blame] | 502 | } |
| 503 | } |
| 504 | } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) { |
| 505 | if (Constant *SimpleCond = |
| 506 | SimplifiedValues.lookup(SI->getCondition())) { |
Michael Zolotukhin | 3a7d55b | 2015-07-29 18:10:29 +0000 | [diff] [blame] | 507 | // Just take the first successor if condition is undef |
| 508 | if (isa<UndefValue>(SimpleCond)) |
Michael Zolotukhin | 1ecdeda | 2016-05-26 21:42:51 +0000 | [diff] [blame] | 509 | KnownSucc = SI->getSuccessor(0); |
| 510 | else if (ConstantInt *SimpleCondVal = |
| 511 | dyn_cast<ConstantInt>(SimpleCond)) |
Chandler Carruth | 927d8e6 | 2017-04-12 07:27:28 +0000 | [diff] [blame] | 512 | KnownSucc = SI->findCaseValue(SimpleCondVal)->getCaseSuccessor(); |
Michael Zolotukhin | 57776b8 | 2015-07-24 01:53:04 +0000 | [diff] [blame] | 513 | } |
| 514 | } |
Michael Zolotukhin | 1ecdeda | 2016-05-26 21:42:51 +0000 | [diff] [blame] | 515 | if (KnownSucc) { |
| 516 | if (L->contains(KnownSucc)) |
| 517 | BBWorklist.insert(KnownSucc); |
| 518 | else |
| 519 | ExitWorklist.insert({BB, KnownSucc}); |
| 520 | continue; |
| 521 | } |
Michael Zolotukhin | 57776b8 | 2015-07-24 01:53:04 +0000 | [diff] [blame] | 522 | |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 523 | // Add BB's successors to the worklist. |
| 524 | for (BasicBlock *Succ : successors(BB)) |
| 525 | if (L->contains(Succ)) |
| 526 | BBWorklist.insert(Succ); |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 527 | else |
| 528 | ExitWorklist.insert({BB, Succ}); |
Michael Zolotukhin | d2268a7 | 2016-05-18 21:20:12 +0000 | [diff] [blame] | 529 | AddCostRecursively(*TI, Iteration); |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 530 | } |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 531 | |
| 532 | // If we found no optimization opportunities on the first iteration, we |
| 533 | // won't find them on later ones too. |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 534 | if (UnrolledCost == RolledDynamicCost) { |
| 535 | DEBUG(dbgs() << " No opportunities found.. exiting.\n" |
| 536 | << " UnrolledCost: " << UnrolledCost << "\n"); |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 537 | return None; |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 538 | } |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 539 | } |
Michael Zolotukhin | 963a6d9 | 2016-05-13 21:23:25 +0000 | [diff] [blame] | 540 | |
| 541 | while (!ExitWorklist.empty()) { |
| 542 | BasicBlock *ExitingBB, *ExitBB; |
| 543 | std::tie(ExitingBB, ExitBB) = ExitWorklist.pop_back_val(); |
| 544 | |
| 545 | for (Instruction &I : *ExitBB) { |
| 546 | auto *PN = dyn_cast<PHINode>(&I); |
| 547 | if (!PN) |
| 548 | break; |
| 549 | |
| 550 | Value *Op = PN->getIncomingValueForBlock(ExitingBB); |
| 551 | if (auto *OpI = dyn_cast<Instruction>(Op)) |
| 552 | if (L->contains(OpI)) |
| 553 | AddCostRecursively(*OpI, TripCount - 1); |
| 554 | } |
| 555 | } |
| 556 | |
Michael Zolotukhin | 80d13ba | 2015-07-28 20:07:29 +0000 | [diff] [blame] | 557 | DEBUG(dbgs() << "Analysis finished:\n" |
| 558 | << "UnrolledCost: " << UnrolledCost << ", " |
| 559 | << "RolledDynamicCost: " << RolledDynamicCost << "\n"); |
Chandler Carruth | 9dabd14 | 2015-06-05 17:01:43 +0000 | [diff] [blame] | 560 | return {{UnrolledCost, RolledDynamicCost}}; |
Chandler Carruth | 0215608 | 2015-05-22 17:41:35 +0000 | [diff] [blame] | 561 | } |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 562 | |
Dan Gohman | 49d08a5 | 2007-05-08 15:14:19 +0000 | [diff] [blame] | 563 | /// ApproximateLoopSize - Approximate the size of the loop. |
Andrew Trick | f765601 | 2011-10-01 01:39:05 +0000 | [diff] [blame] | 564 | static unsigned ApproximateLoopSize(const Loop *L, unsigned &NumCalls, |
Justin Lebar | 6827de1 | 2016-03-14 23:15:34 +0000 | [diff] [blame] | 565 | bool &NotDuplicatable, bool &Convergent, |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 566 | const TargetTransformInfo &TTI, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 567 | AssumptionCache *AC, unsigned BEInsns) { |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 568 | SmallPtrSet<const Value *, 32> EphValues; |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 569 | CodeMetrics::collectEphemeralValues(L, AC, EphValues); |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 570 | |
Dan Gohman | 969e83a | 2009-10-31 14:54:17 +0000 | [diff] [blame] | 571 | CodeMetrics Metrics; |
Sanjay Patel | 5c96723 | 2016-03-08 19:06:12 +0000 | [diff] [blame] | 572 | for (BasicBlock *BB : L->blocks()) |
| 573 | Metrics.analyzeBasicBlock(BB, TTI, EphValues); |
Owen Anderson | 04cf3fd | 2010-09-09 20:32:23 +0000 | [diff] [blame] | 574 | NumCalls = Metrics.NumInlineCandidates; |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 575 | NotDuplicatable = Metrics.notDuplicatable; |
Justin Lebar | 6827de1 | 2016-03-14 23:15:34 +0000 | [diff] [blame] | 576 | Convergent = Metrics.convergent; |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 577 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 578 | unsigned LoopSize = Metrics.NumInsts; |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 579 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 580 | // Don't allow an estimate of size zero. This would allows unrolling of loops |
| 581 | // with huge iteration counts, which is a compile time problem even if it's |
Hal Finkel | 38dd590 | 2015-01-10 00:30:55 +0000 | [diff] [blame] | 582 | // not a problem for code quality. Also, the code using this size may assume |
| 583 | // that each loop has at least three instructions (likely a conditional |
| 584 | // branch, a comparison feeding that branch, and some kind of loop increment |
| 585 | // feeding that comparison instruction). |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 586 | LoopSize = std::max(LoopSize, BEInsns + 1); |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 587 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 588 | return LoopSize; |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 589 | } |
| 590 | |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 591 | // Returns the loop hint metadata node with the given name (for example, |
| 592 | // "llvm.loop.unroll.count"). If no such metadata node exists, then nullptr is |
| 593 | // returned. |
Jingyue Wu | 49a766e | 2015-02-02 20:41:11 +0000 | [diff] [blame] | 594 | static MDNode *GetUnrollMetadataForLoop(const Loop *L, StringRef Name) { |
| 595 | if (MDNode *LoopID = L->getLoopID()) |
| 596 | return GetUnrollMetadata(LoopID, Name); |
| 597 | return nullptr; |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 598 | } |
| 599 | |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 600 | // Returns true if the loop has an unroll(full) pragma. |
| 601 | static bool HasUnrollFullPragma(const Loop *L) { |
Jingyue Wu | 0220df0 | 2015-02-01 02:27:45 +0000 | [diff] [blame] | 602 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.full"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 603 | } |
| 604 | |
Mark Heffernan | 8939154 | 2015-08-10 17:28:08 +0000 | [diff] [blame] | 605 | // Returns true if the loop has an unroll(enable) pragma. This metadata is used |
| 606 | // for both "#pragma unroll" and "#pragma clang loop unroll(enable)" directives. |
| 607 | static bool HasUnrollEnablePragma(const Loop *L) { |
| 608 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.enable"); |
| 609 | } |
| 610 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 611 | // Returns true if the loop has an unroll(disable) pragma. |
| 612 | static bool HasUnrollDisablePragma(const Loop *L) { |
Jingyue Wu | 0220df0 | 2015-02-01 02:27:45 +0000 | [diff] [blame] | 613 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.disable"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 614 | } |
| 615 | |
Kevin Qin | 715b01e | 2015-03-09 06:14:18 +0000 | [diff] [blame] | 616 | // Returns true if the loop has an runtime unroll(disable) pragma. |
| 617 | static bool HasRuntimeUnrollDisablePragma(const Loop *L) { |
| 618 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.runtime.disable"); |
| 619 | } |
| 620 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 621 | // If loop has an unroll_count pragma return the (necessarily |
| 622 | // positive) value from the pragma. Otherwise return 0. |
| 623 | static unsigned UnrollCountPragmaValue(const Loop *L) { |
Jingyue Wu | 49a766e | 2015-02-02 20:41:11 +0000 | [diff] [blame] | 624 | MDNode *MD = GetUnrollMetadataForLoop(L, "llvm.loop.unroll.count"); |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 625 | if (MD) { |
| 626 | assert(MD->getNumOperands() == 2 && |
| 627 | "Unroll count hint metadata should have two operands."); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 628 | unsigned Count = |
| 629 | mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue(); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 630 | assert(Count >= 1 && "Unroll count must be positive."); |
| 631 | return Count; |
| 632 | } |
| 633 | return 0; |
| 634 | } |
| 635 | |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 636 | // Remove existing unroll metadata and add unroll disable metadata to |
| 637 | // indicate the loop has already been unrolled. This prevents a loop |
| 638 | // from being unrolled more than is directed by a pragma if the loop |
| 639 | // unrolling pass is run more than once (which it generally is). |
| 640 | static void SetLoopAlreadyUnrolled(Loop *L) { |
| 641 | MDNode *LoopID = L->getLoopID(); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 642 | // First remove any existing loop unrolling metadata. |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 643 | SmallVector<Metadata *, 4> MDs; |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 644 | // Reserve first location for self reference to the LoopID metadata node. |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 645 | MDs.push_back(nullptr); |
Evgeny Stupachenko | 3e2f389 | 2016-06-08 20:21:24 +0000 | [diff] [blame] | 646 | |
| 647 | if (LoopID) { |
| 648 | for (unsigned i = 1, ie = LoopID->getNumOperands(); i < ie; ++i) { |
| 649 | bool IsUnrollMetadata = false; |
| 650 | MDNode *MD = dyn_cast<MDNode>(LoopID->getOperand(i)); |
| 651 | if (MD) { |
| 652 | const MDString *S = dyn_cast<MDString>(MD->getOperand(0)); |
| 653 | IsUnrollMetadata = S && S->getString().startswith("llvm.loop.unroll."); |
| 654 | } |
| 655 | if (!IsUnrollMetadata) |
| 656 | MDs.push_back(LoopID->getOperand(i)); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 657 | } |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 658 | } |
| 659 | |
| 660 | // Add unroll(disable) metadata to disable future unrolling. |
| 661 | LLVMContext &Context = L->getHeader()->getContext(); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 662 | SmallVector<Metadata *, 1> DisableOperands; |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 663 | DisableOperands.push_back(MDString::get(Context, "llvm.loop.unroll.disable")); |
Mark Heffernan | f3764da | 2014-07-18 21:29:41 +0000 | [diff] [blame] | 664 | MDNode *DisableNode = MDNode::get(Context, DisableOperands); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 665 | MDs.push_back(DisableNode); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 666 | |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 667 | MDNode *NewLoopID = MDNode::get(Context, MDs); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 668 | // Set operand 0 to refer to the loop id itself. |
| 669 | NewLoopID->replaceOperandWith(0, NewLoopID); |
| 670 | L->setLoopID(NewLoopID); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 671 | } |
| 672 | |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 673 | // Computes the boosting factor for complete unrolling. |
| 674 | // If fully unrolling the loop would save a lot of RolledDynamicCost, it would |
| 675 | // be beneficial to fully unroll the loop even if unrolledcost is large. We |
| 676 | // use (RolledDynamicCost / UnrolledCost) to model the unroll benefits to adjust |
| 677 | // the unroll threshold. |
| 678 | static unsigned getFullUnrollBoostingFactor(const EstimatedUnrollCost &Cost, |
| 679 | unsigned MaxPercentThresholdBoost) { |
| 680 | if (Cost.RolledDynamicCost >= UINT_MAX / 100) |
| 681 | return 100; |
| 682 | else if (Cost.UnrolledCost != 0) |
| 683 | // The boosting factor is RolledDynamicCost / UnrolledCost |
| 684 | return std::min(100 * Cost.RolledDynamicCost / Cost.UnrolledCost, |
| 685 | MaxPercentThresholdBoost); |
| 686 | else |
| 687 | return MaxPercentThresholdBoost; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 688 | } |
| 689 | |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 690 | // Returns loop size estimation for unrolled loop. |
| 691 | static uint64_t getUnrolledLoopSize( |
| 692 | unsigned LoopSize, |
| 693 | TargetTransformInfo::UnrollingPreferences &UP) { |
| 694 | assert(LoopSize >= UP.BEInsns && "LoopSize should not be less than BEInsns!"); |
| 695 | return (uint64_t)(LoopSize - UP.BEInsns) * UP.Count + UP.BEInsns; |
| 696 | } |
| 697 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 698 | // Returns true if unroll count was set explicitly. |
| 699 | // Calculates unroll count and writes it to UP.Count. |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 700 | static bool computeUnrollCount( |
| 701 | Loop *L, const TargetTransformInfo &TTI, DominatorTree &DT, LoopInfo *LI, |
| 702 | ScalarEvolution *SE, OptimizationRemarkEmitter *ORE, unsigned &TripCount, |
| 703 | unsigned MaxTripCount, unsigned &TripMultiple, unsigned LoopSize, |
| 704 | TargetTransformInfo::UnrollingPreferences &UP, bool &UseUpperBound) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 705 | // Check for explicit Count. |
| 706 | // 1st priority is unroll count set by "unroll-count" option. |
| 707 | bool UserUnrollCount = UnrollCount.getNumOccurrences() > 0; |
| 708 | if (UserUnrollCount) { |
| 709 | UP.Count = UnrollCount; |
| 710 | UP.AllowExpensiveTripCount = true; |
| 711 | UP.Force = true; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 712 | if (UP.AllowRemainder && getUnrolledLoopSize(LoopSize, UP) < UP.Threshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 713 | return true; |
| 714 | } |
| 715 | |
| 716 | // 2nd priority is unroll count set by pragma. |
| 717 | unsigned PragmaCount = UnrollCountPragmaValue(L); |
| 718 | if (PragmaCount > 0) { |
| 719 | UP.Count = PragmaCount; |
| 720 | UP.Runtime = true; |
| 721 | UP.AllowExpensiveTripCount = true; |
| 722 | UP.Force = true; |
| 723 | if (UP.AllowRemainder && |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 724 | getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 725 | return true; |
| 726 | } |
| 727 | bool PragmaFullUnroll = HasUnrollFullPragma(L); |
| 728 | if (PragmaFullUnroll && TripCount != 0) { |
| 729 | UP.Count = TripCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 730 | if (getUnrolledLoopSize(LoopSize, UP) < PragmaUnrollThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 731 | return false; |
| 732 | } |
| 733 | |
| 734 | bool PragmaEnableUnroll = HasUnrollEnablePragma(L); |
| 735 | bool ExplicitUnroll = PragmaCount > 0 || PragmaFullUnroll || |
| 736 | PragmaEnableUnroll || UserUnrollCount; |
| 737 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 738 | if (ExplicitUnroll && TripCount != 0) { |
| 739 | // If the loop has an unrolling pragma, we want to be more aggressive with |
| 740 | // unrolling limits. Set thresholds to at least the PragmaThreshold value |
| 741 | // which is larger than the default limits. |
| 742 | UP.Threshold = std::max<unsigned>(UP.Threshold, PragmaUnrollThreshold); |
| 743 | UP.PartialThreshold = |
| 744 | std::max<unsigned>(UP.PartialThreshold, PragmaUnrollThreshold); |
| 745 | } |
| 746 | |
| 747 | // 3rd priority is full unroll count. |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 748 | // Full unroll makes sense only when TripCount or its upper bound could be |
| 749 | // statically calculated. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 750 | // Also we need to check if we exceed FullUnrollMaxCount. |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 751 | // If using the upper bound to unroll, TripMultiple should be set to 1 because |
| 752 | // we do not know when loop may exit. |
| 753 | // MaxTripCount and ExactTripCount cannot both be non zero since we only |
| 754 | // compute the former when the latter is zero. |
| 755 | unsigned ExactTripCount = TripCount; |
| 756 | assert((ExactTripCount == 0 || MaxTripCount == 0) && |
| 757 | "ExtractTripCound and MaxTripCount cannot both be non zero."); |
| 758 | unsigned FullUnrollTripCount = ExactTripCount ? ExactTripCount : MaxTripCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 759 | UP.Count = FullUnrollTripCount; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 760 | if (FullUnrollTripCount && FullUnrollTripCount <= UP.FullUnrollMaxCount) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 761 | // When computing the unrolled size, note that BEInsns are not replicated |
| 762 | // like the rest of the loop body. |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 763 | if (getUnrolledLoopSize(LoopSize, UP) < UP.Threshold) { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 764 | UseUpperBound = (MaxTripCount == FullUnrollTripCount); |
| 765 | TripCount = FullUnrollTripCount; |
| 766 | TripMultiple = UP.UpperBound ? 1 : TripMultiple; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 767 | return ExplicitUnroll; |
| 768 | } else { |
| 769 | // The loop isn't that small, but we still can fully unroll it if that |
| 770 | // helps to remove a significant number of instructions. |
| 771 | // To check that, run additional analysis on the loop. |
| 772 | if (Optional<EstimatedUnrollCost> Cost = analyzeLoopUnrollCost( |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 773 | L, FullUnrollTripCount, DT, *SE, TTI, |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 774 | UP.Threshold * UP.MaxPercentThresholdBoost / 100)) { |
| 775 | unsigned Boost = |
| 776 | getFullUnrollBoostingFactor(*Cost, UP.MaxPercentThresholdBoost); |
| 777 | if (Cost->UnrolledCost < UP.Threshold * Boost / 100) { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 778 | UseUpperBound = (MaxTripCount == FullUnrollTripCount); |
| 779 | TripCount = FullUnrollTripCount; |
| 780 | TripMultiple = UP.UpperBound ? 1 : TripMultiple; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 781 | return ExplicitUnroll; |
| 782 | } |
Dehao Chen | cc76344 | 2016-12-30 00:50:28 +0000 | [diff] [blame] | 783 | } |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 784 | } |
| 785 | } |
| 786 | |
Sanjoy Das | eed71b9 | 2017-03-03 18:19:10 +0000 | [diff] [blame] | 787 | // 4th priority is loop peeling |
| 788 | computePeelCount(L, LoopSize, UP, TripCount); |
| 789 | if (UP.PeelCount) { |
| 790 | UP.Runtime = false; |
| 791 | UP.Count = 1; |
| 792 | return ExplicitUnroll; |
| 793 | } |
| 794 | |
| 795 | // 5th priority is partial unrolling. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 796 | // Try partial unroll only when TripCount could be staticaly calculated. |
| 797 | if (TripCount) { |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 798 | UP.Partial |= ExplicitUnroll; |
| 799 | if (!UP.Partial) { |
| 800 | DEBUG(dbgs() << " will not try to unroll partially because " |
| 801 | << "-unroll-allow-partial not given\n"); |
| 802 | UP.Count = 0; |
| 803 | return false; |
| 804 | } |
Haicheng Wu | 430b3e4 | 2016-10-27 18:40:02 +0000 | [diff] [blame] | 805 | if (UP.Count == 0) |
| 806 | UP.Count = TripCount; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 807 | if (UP.PartialThreshold != NoThreshold) { |
| 808 | // Reduce unroll count to be modulo of TripCount for partial unrolling. |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 809 | if (getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
| 810 | UP.Count = |
| 811 | (std::max(UP.PartialThreshold, UP.BEInsns + 1) - UP.BEInsns) / |
| 812 | (LoopSize - UP.BEInsns); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 813 | if (UP.Count > UP.MaxCount) |
| 814 | UP.Count = UP.MaxCount; |
| 815 | while (UP.Count != 0 && TripCount % UP.Count != 0) |
| 816 | UP.Count--; |
| 817 | if (UP.AllowRemainder && UP.Count <= 1) { |
| 818 | // If there is no Count that is modulo of TripCount, set Count to |
| 819 | // largest power-of-two factor that satisfies the threshold limit. |
| 820 | // As we'll create fixup loop, do the type of unrolling only if |
| 821 | // remainder loop is allowed. |
Jonas Paulsson | 58c5a7f | 2016-09-28 09:41:38 +0000 | [diff] [blame] | 822 | UP.Count = UP.DefaultUnrollRuntimeCount; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 823 | while (UP.Count != 0 && |
| 824 | getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 825 | UP.Count >>= 1; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 826 | } |
| 827 | if (UP.Count < 2) { |
| 828 | if (PragmaEnableUnroll) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 829 | ORE->emit( |
| 830 | OptimizationRemarkMissed(DEBUG_TYPE, "UnrollAsDirectedTooLarge", |
| 831 | L->getStartLoc(), L->getHeader()) |
| 832 | << "Unable to unroll loop as directed by unroll(enable) pragma " |
| 833 | "because unrolled size is too large."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 834 | UP.Count = 0; |
| 835 | } |
| 836 | } else { |
| 837 | UP.Count = TripCount; |
| 838 | } |
| 839 | if ((PragmaFullUnroll || PragmaEnableUnroll) && TripCount && |
| 840 | UP.Count != TripCount) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 841 | ORE->emit( |
| 842 | OptimizationRemarkMissed(DEBUG_TYPE, "FullUnrollAsDirectedTooLarge", |
| 843 | L->getStartLoc(), L->getHeader()) |
| 844 | << "Unable to fully unroll loop as directed by unroll pragma because " |
| 845 | "unrolled size is too large."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 846 | return ExplicitUnroll; |
| 847 | } |
| 848 | assert(TripCount == 0 && |
| 849 | "All cases when TripCount is constant should be covered here."); |
| 850 | if (PragmaFullUnroll) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 851 | ORE->emit( |
| 852 | OptimizationRemarkMissed(DEBUG_TYPE, |
| 853 | "CantFullUnrollAsDirectedRuntimeTripCount", |
| 854 | L->getStartLoc(), L->getHeader()) |
| 855 | << "Unable to fully unroll loop as directed by unroll(full) pragma " |
| 856 | "because loop has a runtime trip count."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 857 | |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 858 | // 6th priority is runtime unrolling. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 859 | // Don't unroll a runtime trip count loop when it is disabled. |
| 860 | if (HasRuntimeUnrollDisablePragma(L)) { |
| 861 | UP.Count = 0; |
| 862 | return false; |
| 863 | } |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 864 | |
| 865 | // Check if the runtime trip count is too small when profile is available. |
| 866 | if (L->getHeader()->getParent()->getEntryCount()) { |
| 867 | if (auto ProfileTripCount = getLoopEstimatedTripCount(L)) { |
| 868 | if (*ProfileTripCount < FlatLoopTripCountThreshold) |
| 869 | return false; |
| 870 | else |
| 871 | UP.AllowExpensiveTripCount = true; |
| 872 | } |
| 873 | } |
| 874 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 875 | // Reduce count based on the type of unrolling and the threshold values. |
| 876 | UP.Runtime |= PragmaEnableUnroll || PragmaCount > 0 || UserUnrollCount; |
| 877 | if (!UP.Runtime) { |
| 878 | DEBUG(dbgs() << " will not try to unroll loop with runtime trip count " |
| 879 | << "-unroll-runtime not given\n"); |
| 880 | UP.Count = 0; |
| 881 | return false; |
| 882 | } |
| 883 | if (UP.Count == 0) |
Jonas Paulsson | 58c5a7f | 2016-09-28 09:41:38 +0000 | [diff] [blame] | 884 | UP.Count = UP.DefaultUnrollRuntimeCount; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 885 | |
| 886 | // Reduce unroll count to be the largest power-of-two factor of |
| 887 | // the original count which satisfies the threshold limit. |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 888 | while (UP.Count != 0 && |
| 889 | getUnrolledLoopSize(LoopSize, UP) > UP.PartialThreshold) |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 890 | UP.Count >>= 1; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 891 | |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 892 | #ifndef NDEBUG |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 893 | unsigned OrigCount = UP.Count; |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 894 | #endif |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 895 | |
| 896 | if (!UP.AllowRemainder && UP.Count != 0 && (TripMultiple % UP.Count) != 0) { |
| 897 | while (UP.Count != 0 && TripMultiple % UP.Count != 0) |
| 898 | UP.Count >>= 1; |
| 899 | DEBUG(dbgs() << "Remainder loop is restricted (that could architecture " |
| 900 | "specific or because the loop contains a convergent " |
| 901 | "instruction), so unroll count must divide the trip " |
| 902 | "multiple, " |
| 903 | << TripMultiple << ". Reducing unroll count from " |
| 904 | << OrigCount << " to " << UP.Count << ".\n"); |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 905 | using namespace ore; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 906 | if (PragmaCount > 0 && !UP.AllowRemainder) |
Adam Nemet | f57cc62 | 2016-09-30 03:44:16 +0000 | [diff] [blame] | 907 | ORE->emit( |
| 908 | OptimizationRemarkMissed(DEBUG_TYPE, |
| 909 | "DifferentUnrollCountFromDirected", |
| 910 | L->getStartLoc(), L->getHeader()) |
| 911 | << "Unable to unroll loop the number of times directed by " |
| 912 | "unroll_count pragma because remainder loop is restricted " |
| 913 | "(that could architecture specific or because the loop " |
| 914 | "contains a convergent instruction) and so must have an unroll " |
| 915 | "count that divides the loop trip multiple of " |
| 916 | << NV("TripMultiple", TripMultiple) << ". Unrolling instead " |
| 917 | << NV("UnrollCount", UP.Count) << " time(s)."); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 918 | } |
| 919 | |
| 920 | if (UP.Count > UP.MaxCount) |
| 921 | UP.Count = UP.MaxCount; |
| 922 | DEBUG(dbgs() << " partially unrolling with count: " << UP.Count << "\n"); |
| 923 | if (UP.Count < 2) |
| 924 | UP.Count = 0; |
| 925 | return ExplicitUnroll; |
| 926 | } |
| 927 | |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 928 | static bool tryToUnrollLoop(Loop *L, DominatorTree &DT, LoopInfo *LI, |
| 929 | ScalarEvolution *SE, const TargetTransformInfo &TTI, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 930 | AssumptionCache &AC, OptimizationRemarkEmitter &ORE, |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 931 | bool PreserveLCSSA, int OptLevel, |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 932 | Optional<unsigned> ProvidedCount, |
| 933 | Optional<unsigned> ProvidedThreshold, |
| 934 | Optional<bool> ProvidedAllowPartial, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 935 | Optional<bool> ProvidedRuntime, |
| 936 | Optional<bool> ProvidedUpperBound) { |
Evgeny Stupachenko | b787522 | 2016-05-28 00:14:58 +0000 | [diff] [blame] | 937 | DEBUG(dbgs() << "Loop Unroll: F[" << L->getHeader()->getParent()->getName() |
| 938 | << "] Loop %" << L->getHeader()->getName() << "\n"); |
Haicheng Wu | 731b04c | 2016-11-23 19:39:26 +0000 | [diff] [blame] | 939 | if (HasUnrollDisablePragma(L)) |
| 940 | return false; |
| 941 | if (!L->isLoopSimplifyForm()) { |
| 942 | DEBUG( |
| 943 | dbgs() << " Not unrolling loop which is not in loop-simplify form.\n"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 944 | return false; |
| 945 | } |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 946 | |
| 947 | unsigned NumInlineCandidates; |
| 948 | bool NotDuplicatable; |
| 949 | bool Convergent; |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 950 | TargetTransformInfo::UnrollingPreferences UP = gatherUnrollingPreferences( |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 951 | L, TTI, OptLevel, ProvidedThreshold, ProvidedCount, ProvidedAllowPartial, |
Evgeny Stupachenko | c2698cd | 2016-11-09 19:56:39 +0000 | [diff] [blame] | 952 | ProvidedRuntime, ProvidedUpperBound); |
Haicheng Wu | 731b04c | 2016-11-23 19:39:26 +0000 | [diff] [blame] | 953 | // Exit early if unrolling is disabled. |
| 954 | if (UP.Threshold == 0 && (!UP.Partial || UP.PartialThreshold == 0)) |
| 955 | return false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 956 | unsigned LoopSize = ApproximateLoopSize( |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 957 | L, NumInlineCandidates, NotDuplicatable, Convergent, TTI, &AC, UP.BEInsns); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 958 | DEBUG(dbgs() << " Loop Size = " << LoopSize << "\n"); |
| 959 | if (NotDuplicatable) { |
| 960 | DEBUG(dbgs() << " Not unrolling loop which contains non-duplicatable" |
| 961 | << " instructions.\n"); |
| 962 | return false; |
| 963 | } |
| 964 | if (NumInlineCandidates != 0) { |
| 965 | DEBUG(dbgs() << " Not unrolling loop with inlinable calls.\n"); |
| 966 | return false; |
| 967 | } |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 968 | |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 969 | // Find trip count and trip multiple if count is not available |
| 970 | unsigned TripCount = 0; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 971 | unsigned MaxTripCount = 0; |
Andrew Trick | 1cabe54 | 2011-07-23 00:33:05 +0000 | [diff] [blame] | 972 | unsigned TripMultiple = 1; |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 973 | // If there are multiple exiting blocks but one of them is the latch, use the |
| 974 | // latch for the trip count estimation. Otherwise insist on a single exiting |
| 975 | // block for the trip count estimation. |
| 976 | BasicBlock *ExitingBlock = L->getLoopLatch(); |
| 977 | if (!ExitingBlock || !L->isLoopExiting(ExitingBlock)) |
| 978 | ExitingBlock = L->getExitingBlock(); |
| 979 | if (ExitingBlock) { |
| 980 | TripCount = SE->getSmallConstantTripCount(L, ExitingBlock); |
| 981 | TripMultiple = SE->getSmallConstantTripMultiple(L, ExitingBlock); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 982 | } |
Hal Finkel | 8f2e700 | 2013-09-11 19:25:43 +0000 | [diff] [blame] | 983 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 984 | // If the loop contains a convergent operation, the prelude we'd add |
| 985 | // to do the first few instructions before we hit the unrolled loop |
| 986 | // is unsafe -- it adds a control-flow dependency to the convergent |
| 987 | // operation. Therefore restrict remainder loop (try unrollig without). |
| 988 | // |
| 989 | // TODO: This is quite conservative. In practice, convergent_op() |
| 990 | // is likely to be called unconditionally in the loop. In this |
| 991 | // case, the program would be ill-formed (on most architectures) |
| 992 | // unless n were the same on all threads in a thread group. |
| 993 | // Assuming n is the same on all threads, any kind of unrolling is |
| 994 | // safe. But currently llvm's notion of convergence isn't powerful |
| 995 | // enough to express this. |
| 996 | if (Convergent) |
| 997 | UP.AllowRemainder = false; |
Eli Bendersky | dc6de2c | 2014-06-12 18:05:39 +0000 | [diff] [blame] | 998 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 999 | // Try to find the trip count upper bound if we cannot find the exact trip |
| 1000 | // count. |
| 1001 | bool MaxOrZero = false; |
| 1002 | if (!TripCount) { |
| 1003 | MaxTripCount = SE->getSmallConstantMaxTripCount(L); |
| 1004 | MaxOrZero = SE->isBackedgeTakenCountMaxOrZero(L); |
| 1005 | // We can unroll by the upper bound amount if it's generally allowed or if |
| 1006 | // we know that the loop is executed either the upper bound or zero times. |
| 1007 | // (MaxOrZero unrolling keeps only the first loop test, so the number of |
| 1008 | // loop tests remains the same compared to the non-unrolled version, whereas |
| 1009 | // the generic upper bound unrolling keeps all but the last loop test so the |
| 1010 | // number of loop tests goes up which may end up being worse on targets with |
| 1011 | // constriained branch predictor resources so is controlled by an option.) |
| 1012 | // In addition we only unroll small upper bounds. |
| 1013 | if (!(UP.UpperBound || MaxOrZero) || MaxTripCount > UnrollMaxUpperBound) { |
| 1014 | MaxTripCount = 0; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1015 | } |
| 1016 | } |
| 1017 | |
| 1018 | // computeUnrollCount() decides whether it is beneficial to use upper bound to |
| 1019 | // fully unroll the loop. |
| 1020 | bool UseUpperBound = false; |
| 1021 | bool IsCountSetExplicitly = |
| 1022 | computeUnrollCount(L, TTI, DT, LI, SE, &ORE, TripCount, MaxTripCount, |
| 1023 | TripMultiple, LoopSize, UP, UseUpperBound); |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1024 | if (!UP.Count) |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 1025 | return false; |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1026 | // Unroll factor (Count) must be less or equal to TripCount. |
| 1027 | if (TripCount && UP.Count > TripCount) |
| 1028 | UP.Count = TripCount; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 1029 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 1030 | // Unroll the loop. |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1031 | if (!UnrollLoop(L, UP.Count, TripCount, UP.Force, UP.Runtime, |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 1032 | UP.AllowExpensiveTripCount, UseUpperBound, MaxOrZero, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1033 | TripMultiple, UP.PeelCount, LI, SE, &DT, &AC, &ORE, |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1034 | PreserveLCSSA)) |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 1035 | return false; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 1036 | |
Evgeny Stupachenko | ea2aef4 | 2016-05-27 23:15:06 +0000 | [diff] [blame] | 1037 | // If loop has an unroll count pragma or unrolled by explicitly set count |
| 1038 | // mark loop as unrolled to prevent unrolling beyond that requested. |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1039 | // If the loop was peeled, we already "used up" the profile information |
| 1040 | // we had, so we don't want to unroll or peel again. |
| 1041 | if (IsCountSetExplicitly || UP.PeelCount) |
David L Kreitzer | 8d441eb | 2016-03-25 14:24:52 +0000 | [diff] [blame] | 1042 | SetLoopAlreadyUnrolled(L); |
Michael Kuperstein | b151a64 | 2016-11-30 21:13:57 +0000 | [diff] [blame] | 1043 | |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 1044 | return true; |
| 1045 | } |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1046 | |
| 1047 | namespace { |
| 1048 | class LoopUnroll : public LoopPass { |
| 1049 | public: |
| 1050 | static char ID; // Pass ID, replacement for typeid |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1051 | LoopUnroll(int OptLevel = 2, Optional<unsigned> Threshold = None, |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1052 | Optional<unsigned> Count = None, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1053 | Optional<bool> AllowPartial = None, Optional<bool> Runtime = None, |
| 1054 | Optional<bool> UpperBound = None) |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1055 | : LoopPass(ID), OptLevel(OptLevel), ProvidedCount(std::move(Count)), |
Benjamin Kramer | 82de7d3 | 2016-05-27 14:27:24 +0000 | [diff] [blame] | 1056 | ProvidedThreshold(Threshold), ProvidedAllowPartial(AllowPartial), |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1057 | ProvidedRuntime(Runtime), ProvidedUpperBound(UpperBound) { |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1058 | initializeLoopUnrollPass(*PassRegistry::getPassRegistry()); |
| 1059 | } |
| 1060 | |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1061 | int OptLevel; |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1062 | Optional<unsigned> ProvidedCount; |
| 1063 | Optional<unsigned> ProvidedThreshold; |
| 1064 | Optional<bool> ProvidedAllowPartial; |
| 1065 | Optional<bool> ProvidedRuntime; |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1066 | Optional<bool> ProvidedUpperBound; |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1067 | |
| 1068 | bool runOnLoop(Loop *L, LPPassManager &) override { |
Andrew Kaylor | aa641a5 | 2016-04-22 22:06:11 +0000 | [diff] [blame] | 1069 | if (skipLoop(L)) |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1070 | return false; |
| 1071 | |
| 1072 | Function &F = *L->getHeader()->getParent(); |
| 1073 | |
| 1074 | auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree(); |
| 1075 | LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo(); |
| 1076 | ScalarEvolution *SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE(); |
| 1077 | const TargetTransformInfo &TTI = |
| 1078 | getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F); |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1079 | auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F); |
Adam Nemet | 4f155b6 | 2016-08-26 15:58:34 +0000 | [diff] [blame] | 1080 | // For the old PM, we can't use OptimizationRemarkEmitter as an analysis |
| 1081 | // pass. Function analyses need to be preserved across loop transformations |
| 1082 | // but ORE cannot be preserved (see comment before the pass definition). |
| 1083 | OptimizationRemarkEmitter ORE(&F); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1084 | bool PreserveLCSSA = mustPreserveAnalysisID(LCSSAID); |
| 1085 | |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1086 | return tryToUnrollLoop(L, DT, LI, SE, TTI, AC, ORE, PreserveLCSSA, OptLevel, |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1087 | ProvidedCount, ProvidedThreshold, |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 1088 | ProvidedAllowPartial, ProvidedRuntime, |
| 1089 | ProvidedUpperBound); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1090 | } |
| 1091 | |
| 1092 | /// This transformation requires natural loop information & requires that |
| 1093 | /// loop preheaders be inserted into the CFG... |
| 1094 | /// |
| 1095 | void getAnalysisUsage(AnalysisUsage &AU) const override { |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1096 | AU.addRequired<AssumptionCacheTracker>(); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1097 | AU.addRequired<TargetTransformInfoWrapperPass>(); |
Chandler Carruth | 31088a9 | 2016-02-19 10:45:18 +0000 | [diff] [blame] | 1098 | // FIXME: Loop passes are required to preserve domtree, and for now we just |
| 1099 | // recreate dom info if anything gets unrolled. |
| 1100 | getLoopAnalysisUsage(AU); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1101 | } |
| 1102 | }; |
| 1103 | } |
| 1104 | |
char LoopUnroll::ID = 0;
// Register the pass with the legacy pass manager (exposed as -loop-unroll in
// opt) along with the analyses it transitively depends on.
INITIALIZE_PASS_BEGIN(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(LoopPass)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_END(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
| 1111 | |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1112 | Pass *llvm::createLoopUnrollPass(int OptLevel, int Threshold, int Count, |
| 1113 | int AllowPartial, int Runtime, |
| 1114 | int UpperBound) { |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1115 | // TODO: It would make more sense for this function to take the optionals |
| 1116 | // directly, but that's dangerous since it would silently break out of tree |
| 1117 | // callers. |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1118 | return new LoopUnroll( |
| 1119 | OptLevel, Threshold == -1 ? None : Optional<unsigned>(Threshold), |
| 1120 | Count == -1 ? None : Optional<unsigned>(Count), |
| 1121 | AllowPartial == -1 ? None : Optional<bool>(AllowPartial), |
| 1122 | Runtime == -1 ? None : Optional<bool>(Runtime), |
| 1123 | UpperBound == -1 ? None : Optional<bool>(UpperBound)); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1124 | } |
| 1125 | |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1126 | Pass *llvm::createSimpleLoopUnrollPass(int OptLevel) { |
| 1127 | return llvm::createLoopUnrollPass(OptLevel, -1, -1, 0, 0, 0); |
Justin Bogner | b8d82ab | 2016-01-12 05:21:37 +0000 | [diff] [blame] | 1128 | } |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1129 | |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame] | 1130 | PreservedAnalyses LoopUnrollPass::run(Loop &L, LoopAnalysisManager &AM, |
| 1131 | LoopStandardAnalysisResults &AR, |
Chandler Carruth | ce40fa1 | 2017-01-25 02:49:01 +0000 | [diff] [blame] | 1132 | LPMUpdater &Updater) { |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1133 | const auto &FAM = |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame] | 1134 | AM.getResult<FunctionAnalysisManagerLoopProxy>(L, AR).getManager(); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1135 | Function *F = L.getHeader()->getParent(); |
| 1136 | |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1137 | auto *ORE = FAM.getCachedResult<OptimizationRemarkEmitterAnalysis>(*F); |
Chandler Carruth | 410eaeb | 2017-01-11 06:23:21 +0000 | [diff] [blame] | 1138 | // FIXME: This should probably be optional rather than required. |
Adam Nemet | 12937c3 | 2016-07-29 19:29:47 +0000 | [diff] [blame] | 1139 | if (!ORE) |
| 1140 | report_fatal_error("LoopUnrollPass: OptimizationRemarkEmitterAnalysis not " |
| 1141 | "cached at a higher level"); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1142 | |
Chandler Carruth | ce40fa1 | 2017-01-25 02:49:01 +0000 | [diff] [blame] | 1143 | // Keep track of the previous loop structure so we can identify new loops |
| 1144 | // created by unrolling. |
| 1145 | Loop *ParentL = L.getParentLoop(); |
| 1146 | SmallPtrSet<Loop *, 4> OldLoops; |
| 1147 | if (ParentL) |
| 1148 | OldLoops.insert(ParentL->begin(), ParentL->end()); |
| 1149 | else |
| 1150 | OldLoops.insert(AR.LI.begin(), AR.LI.end()); |
| 1151 | |
Chandler Carruth | eab3b90 | 2017-01-26 02:13:50 +0000 | [diff] [blame] | 1152 | // The API here is quite complex to call, but there are only two interesting |
| 1153 | // states we support: partial and full (or "simple") unrolling. However, to |
| 1154 | // enable these things we actually pass "None" in for the optional to avoid |
| 1155 | // providing an explicit choice. |
| 1156 | Optional<bool> AllowPartialParam, RuntimeParam, UpperBoundParam; |
| 1157 | if (!AllowPartialUnrolling) |
| 1158 | AllowPartialParam = RuntimeParam = UpperBoundParam = false; |
Dehao Chen | 7d23032 | 2017-02-18 03:46:51 +0000 | [diff] [blame] | 1159 | bool Changed = tryToUnrollLoop( |
| 1160 | &L, AR.DT, &AR.LI, &AR.SE, AR.TTI, AR.AC, *ORE, |
| 1161 | /*PreserveLCSSA*/ true, OptLevel, /*Count*/ None, |
| 1162 | /*Threshold*/ None, AllowPartialParam, RuntimeParam, UpperBoundParam); |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1163 | if (!Changed) |
| 1164 | return PreservedAnalyses::all(); |
Chandler Carruth | ca68a3e | 2017-01-15 06:32:49 +0000 | [diff] [blame] | 1165 | |
Chandler Carruth | ce40fa1 | 2017-01-25 02:49:01 +0000 | [diff] [blame] | 1166 | // The parent must not be damaged by unrolling! |
| 1167 | #ifndef NDEBUG |
| 1168 | if (ParentL) |
| 1169 | ParentL->verifyLoop(); |
| 1170 | #endif |
| 1171 | |
| 1172 | // Unrolling can do several things to introduce new loops into a loop nest: |
Michael Kuperstein | 5dd55e8 | 2017-01-26 01:04:11 +0000 | [diff] [blame] | 1173 | // - Partial unrolling clones child loops within the current loop. If it |
| 1174 | // uses a remainder, then it can also create any number of sibling loops. |
Chandler Carruth | ce40fa1 | 2017-01-25 02:49:01 +0000 | [diff] [blame] | 1175 | // - Full unrolling clones child loops within the current loop but then |
| 1176 | // removes the current loop making all of the children appear to be new |
| 1177 | // sibling loops. |
| 1178 | // - Loop peeling can directly introduce new sibling loops by peeling one |
| 1179 | // iteration. |
| 1180 | // |
| 1181 | // When a new loop appears as a sibling loop, either from peeling an |
| 1182 | // iteration or fully unrolling, its nesting structure has fundamentally |
| 1183 | // changed and we want to revisit it to reflect that. |
| 1184 | // |
| 1185 | // When unrolling has removed the current loop, we need to tell the |
| 1186 | // infrastructure that it is gone. |
| 1187 | // |
| 1188 | // Finally, we support a debugging/testing mode where we revisit child loops |
| 1189 | // as well. These are not expected to require further optimizations as either |
| 1190 | // they or the loop they were cloned from have been directly visited already. |
| 1191 | // But the debugging mode allows us to check this assumption. |
| 1192 | bool IsCurrentLoopValid = false; |
| 1193 | SmallVector<Loop *, 4> SibLoops; |
| 1194 | if (ParentL) |
| 1195 | SibLoops.append(ParentL->begin(), ParentL->end()); |
| 1196 | else |
| 1197 | SibLoops.append(AR.LI.begin(), AR.LI.end()); |
| 1198 | erase_if(SibLoops, [&](Loop *SibLoop) { |
| 1199 | if (SibLoop == &L) { |
| 1200 | IsCurrentLoopValid = true; |
| 1201 | return true; |
| 1202 | } |
| 1203 | |
| 1204 | // Otherwise erase the loop from the list if it was in the old loops. |
| 1205 | return OldLoops.count(SibLoop) != 0; |
| 1206 | }); |
| 1207 | Updater.addSiblingLoops(SibLoops); |
| 1208 | |
| 1209 | if (!IsCurrentLoopValid) { |
| 1210 | Updater.markLoopAsDeleted(L); |
| 1211 | } else { |
| 1212 | // We can only walk child loops if the current loop remained valid. |
| 1213 | if (UnrollRevisitChildLoops) { |
| 1214 | // Walk *all* of the child loops. This is a highly speculative mode |
| 1215 | // anyways so look for any simplifications that arose from partial |
| 1216 | // unrolling or peeling off of iterations. |
| 1217 | SmallVector<Loop *, 4> ChildLoops(L.begin(), L.end()); |
| 1218 | Updater.addChildLoops(ChildLoops); |
| 1219 | } |
| 1220 | } |
| 1221 | |
Sean Silva | e3c18a5 | 2016-07-19 23:54:23 +0000 | [diff] [blame] | 1222 | return getLoopPassPreservedAnalyses(); |
| 1223 | } |