//===-- LoopUnroll.cpp - Loop unroller pass -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements a simple loop unroller.  It works best when loops have
// been canonicalized by the -indvars pass, allowing it to determine the trip
// counts of loops easily.
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopPass.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/UnrollLoop.h"
#include <climits>

using namespace llvm;

#define DEBUG_TYPE "loop-unroll"

static cl::opt<unsigned>
UnrollThreshold("unroll-threshold", cl::init(150), cl::Hidden,
                cl::desc("The cut-off point for automatic loop unrolling"));

static cl::opt<unsigned> UnrollMaxIterationsCountToAnalyze(
    "unroll-max-iteration-count-to-analyze", cl::init(1000), cl::Hidden,
    cl::desc("Don't allow loop unrolling to simulate more than this number of "
             "iterations when checking full unroll profitability"));

static cl::opt<unsigned> UnrollMinPercentOfOptimized(
    "unroll-percent-of-optimized-for-complete-unroll", cl::init(20), cl::Hidden,
    cl::desc("If complete unrolling could trigger further optimizations, and, "
             "by that, remove the given percent of instructions, perform the "
             "complete unroll even if it's beyond the threshold"));

static cl::opt<unsigned> UnrollAbsoluteThreshold(
    "unroll-absolute-threshold", cl::init(2000), cl::Hidden,
    cl::desc("Don't unroll if the unrolled size is bigger than this threshold,"
             " even if we can remove a big portion of instructions later."));

static cl::opt<unsigned>
UnrollCount("unroll-count", cl::init(0), cl::Hidden,
            cl::desc("Use this unroll count for all loops including those with "
                     "unroll_count pragma values, for testing purposes"));

static cl::opt<bool>
UnrollAllowPartial("unroll-allow-partial", cl::init(false), cl::Hidden,
                   cl::desc("Allows loops to be partially unrolled until "
                            "-unroll-threshold loop size is reached."));

static cl::opt<bool>
UnrollRuntime("unroll-runtime", cl::ZeroOrMore, cl::init(false), cl::Hidden,
              cl::desc("Unroll loops with run-time trip counts"));

static cl::opt<unsigned>
PragmaUnrollThreshold("pragma-unroll-threshold", cl::init(16 * 1024), cl::Hidden,
                      cl::desc("Unrolled size limit for loops with an unroll(full) or "
                               "unroll_count pragma."));

namespace {
  class LoopUnroll : public LoopPass {
  public:
    static char ID; // Pass ID, replacement for typeid
    LoopUnroll(int T = -1, int C = -1, int P = -1, int R = -1) : LoopPass(ID) {
      CurrentThreshold = (T == -1) ? UnrollThreshold : unsigned(T);
      CurrentAbsoluteThreshold = UnrollAbsoluteThreshold;
      CurrentMinPercentOfOptimized = UnrollMinPercentOfOptimized;
      CurrentCount = (C == -1) ? UnrollCount : unsigned(C);
      CurrentAllowPartial = (P == -1) ? UnrollAllowPartial : (bool)P;
      CurrentRuntime = (R == -1) ? UnrollRuntime : (bool)R;

      UserThreshold = (T != -1) || (UnrollThreshold.getNumOccurrences() > 0);
      UserAbsoluteThreshold = (UnrollAbsoluteThreshold.getNumOccurrences() > 0);
      UserPercentOfOptimized =
          (UnrollMinPercentOfOptimized.getNumOccurrences() > 0);
      UserAllowPartial = (P != -1) ||
                         (UnrollAllowPartial.getNumOccurrences() > 0);
      UserRuntime = (R != -1) || (UnrollRuntime.getNumOccurrences() > 0);
      UserCount = (C != -1) || (UnrollCount.getNumOccurrences() > 0);

      initializeLoopUnrollPass(*PassRegistry::getPassRegistry());
    }

    /// A magic value for use with the Threshold parameter to indicate
    /// that the loop unroll should be performed regardless of how much
    /// code expansion would result.
    static const unsigned NoThreshold = UINT_MAX;

    // Threshold to use when optsize is specified (and there is no
    // explicit -unroll-threshold).
    static const unsigned OptSizeUnrollThreshold = 50;

    // Default unroll count for loops with run-time trip count if
    // -unroll-count is not set.
    static const unsigned UnrollRuntimeCount = 8;

    unsigned CurrentCount;
    unsigned CurrentThreshold;
    unsigned CurrentAbsoluteThreshold;
    unsigned CurrentMinPercentOfOptimized;
    bool     CurrentAllowPartial;
    bool     CurrentRuntime;
    bool     UserCount;            // CurrentCount is user-specified.
    bool     UserThreshold;        // CurrentThreshold is user-specified.
    bool     UserAbsoluteThreshold;   // CurrentAbsoluteThreshold is
                                      // user-specified.
    bool     UserPercentOfOptimized;  // CurrentMinPercentOfOptimized is
                                      // user-specified.
    bool     UserAllowPartial;     // CurrentAllowPartial is user-specified.
    bool     UserRuntime;          // CurrentRuntime is user-specified.

    bool runOnLoop(Loop *L, LPPassManager &LPM) override;

    /// This transformation requires natural loop information & requires that
    /// loop preheaders be inserted into the CFG...
    ///
    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.addRequired<AssumptionCacheTracker>();
      AU.addRequired<LoopInfoWrapperPass>();
      AU.addPreserved<LoopInfoWrapperPass>();
      AU.addRequiredID(LoopSimplifyID);
      AU.addPreservedID(LoopSimplifyID);
      AU.addRequiredID(LCSSAID);
      AU.addPreservedID(LCSSAID);
      AU.addRequired<ScalarEvolution>();
      AU.addPreserved<ScalarEvolution>();
      AU.addRequired<TargetTransformInfoWrapperPass>();
      // FIXME: Loop unroll requires LCSSA. And LCSSA requires dom info.
      // If loop unroll does not preserve dom info then LCSSA pass on next
      // loop will receive invalid dom info.
      // For now, recreate dom info, if loop is unrolled.
      AU.addPreserved<DominatorTreeWrapperPass>();
    }

    // Fill in the UnrollingPreferences parameter with values from the
    // TargetTransformInfo.
    void getUnrollingPreferences(Loop *L, const TargetTransformInfo &TTI,
                                 TargetTransformInfo::UnrollingPreferences &UP) {
      UP.Threshold = CurrentThreshold;
      UP.AbsoluteThreshold = CurrentAbsoluteThreshold;
      UP.MinPercentOfOptimized = CurrentMinPercentOfOptimized;
      UP.OptSizeThreshold = OptSizeUnrollThreshold;
      UP.PartialThreshold = CurrentThreshold;
      UP.PartialOptSizeThreshold = OptSizeUnrollThreshold;
      UP.Count = CurrentCount;
      UP.MaxCount = UINT_MAX;
      UP.Partial = CurrentAllowPartial;
      UP.Runtime = CurrentRuntime;
      TTI.getUnrollingPreferences(L, UP);
    }

    // Select and return an unroll count based on parameters from the
    // user, unroll preferences, unroll pragmas, or a heuristic.
    // SetExplicitly is set to true if the unroll count is set by
    // the user or a pragma rather than selected heuristically.
    unsigned
    selectUnrollCount(const Loop *L, unsigned TripCount, bool PragmaFullUnroll,
                      unsigned PragmaCount,
                      const TargetTransformInfo::UnrollingPreferences &UP,
                      bool &SetExplicitly);

    // Select threshold values used to limit unrolling based on a
    // total unrolled size.  Parameters Threshold and PartialThreshold
    // are set to the maximum unrolled size for fully and partially
    // unrolled loops respectively.
    void selectThresholds(const Loop *L, bool HasPragma,
                          const TargetTransformInfo::UnrollingPreferences &UP,
                          unsigned &Threshold, unsigned &PartialThreshold,
                          unsigned NumberOfOptimizedInstructions) {
      // Determine the current unrolling threshold.  While this is
      // normally set from UnrollThreshold, it is overridden to a
      // smaller value if the current function is marked as
      // optimize-for-size, and the unroll threshold was not user
      // specified.
      Threshold = UserThreshold ? CurrentThreshold : UP.Threshold;

      // If we are allowed to completely unroll if we can remove M% of
      // instructions, and we know that with complete unrolling we'll be able
      // to kill N instructions, then we can afford to completely unroll loops
      // with unrolled size up to N*100/M.
      // Adjust the threshold according to that:
      unsigned PercentOfOptimizedForCompleteUnroll =
          UserPercentOfOptimized ? CurrentMinPercentOfOptimized
                                 : UP.MinPercentOfOptimized;
      unsigned AbsoluteThreshold = UserAbsoluteThreshold
                                       ? CurrentAbsoluteThreshold
                                       : UP.AbsoluteThreshold;
      if (PercentOfOptimizedForCompleteUnroll)
        Threshold = std::max<unsigned>(Threshold,
                                       NumberOfOptimizedInstructions * 100 /
                                           PercentOfOptimizedForCompleteUnroll);
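      // For example, with the default 20% requirement and an estimated 100
      // removable instructions, complete unrolling is allowed up to an
      // unrolled size of 100 * 100 / 20 = 500, subject to the absolute
      // threshold applied just below.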
      // But don't allow unrolling loops bigger than the absolute threshold.
      Threshold = std::min<unsigned>(Threshold, AbsoluteThreshold);

      PartialThreshold = UserThreshold ? CurrentThreshold : UP.PartialThreshold;
      if (!UserThreshold &&
          L->getHeader()->getParent()->getAttributes().
            hasAttribute(AttributeSet::FunctionIndex,
                         Attribute::OptimizeForSize)) {
        Threshold = UP.OptSizeThreshold;
        PartialThreshold = UP.PartialOptSizeThreshold;
      }
      if (HasPragma) {
        // If the loop has an unrolling pragma, we want to be more
        // aggressive with unrolling limits.  Set thresholds to at
        // least the PragmaUnrollThreshold value, which is larger than the
        // default limits.
        if (Threshold != NoThreshold)
          Threshold = std::max<unsigned>(Threshold, PragmaUnrollThreshold);
        if (PartialThreshold != NoThreshold)
          PartialThreshold =
              std::max<unsigned>(PartialThreshold, PragmaUnrollThreshold);
      }
    }
  };
}

char LoopUnroll::ID = 0;
INITIALIZE_PASS_BEGIN(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopSimplify)
INITIALIZE_PASS_DEPENDENCY(LCSSA)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolution)
INITIALIZE_PASS_END(LoopUnroll, "loop-unroll", "Unroll loops", false, false)

Pass *llvm::createLoopUnrollPass(int Threshold, int Count, int AllowPartial,
                                 int Runtime) {
  return new LoopUnroll(Threshold, Count, AllowPartial, Runtime);
}

Pass *llvm::createSimpleLoopUnrollPass() {
  return llvm::createLoopUnrollPass(-1, -1, 0, 0);
}

static bool isLoadFromConstantInitializer(Value *V) {
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    if (GV->isConstant() && GV->hasDefinitiveInitializer())
      return GV->getInitializer() != nullptr;
  return false;
}

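// SCEV visitor that determines whether the address feeding a load inside loop
// L is "constant-evolving": a base address of a constant global plus an offset
// that is an affine AddRec in L with a constant step.  Illustratively, a load
// from a constant array A at index 3*i + 2 has an address SCEV of roughly
// A + {2,+,3}<L> (sketch of the form only), which this visitor accepts; such a
// load becomes a candidate for constant folding once the loop is completely
// unrolled.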
struct FindConstantPointers {
  bool LoadCanBeConstantFolded;
  bool IndexIsConstant;
  APInt Step;
  APInt StartValue;
  Value *BaseAddress;
  const Loop *L;
  ScalarEvolution &SE;
  FindConstantPointers(const Loop *loop, ScalarEvolution &SE)
      : LoadCanBeConstantFolded(true), IndexIsConstant(true), L(loop), SE(SE) {}

  bool follow(const SCEV *S) {
    if (const SCEVUnknown *SC = dyn_cast<SCEVUnknown>(S)) {
      // We've reached a leaf node of the SCEV; it's most probably just a
      // variable.  Now it's time to see if it corresponds to a constant
      // global (in which case we can eliminate the load), or not.
      BaseAddress = SC->getValue();
      LoadCanBeConstantFolded =
          IndexIsConstant && isLoadFromConstantInitializer(BaseAddress);
      return false;
    }
    if (isa<SCEVConstant>(S))
      return true;
    if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) {
      // If the current SCEV expression is an AddRec, and its loop isn't the
      // loop we are about to unroll, then we won't get a constant address
      // after unrolling, and thus, won't be able to eliminate the load.
      if (AR->getLoop() != L)
        return IndexIsConstant = false;
      // If the step isn't constant, we won't get constant addresses in the
      // unrolled version.  Bail out.
      if (const SCEVConstant *StepSE =
              dyn_cast<SCEVConstant>(AR->getStepRecurrence(SE)))
        Step = StepSE->getValue()->getValue();
      else
        return IndexIsConstant = false;

      return IndexIsConstant;
    }
    // If IndexIsConstant is still true, continue the traversal.
    // Otherwise, we have found something that prevents us from (possibly)
    // eliminating the load.
    return IndexIsConstant;
  }
  bool isDone() const { return !IndexIsConstant; }
};

// This class is used to get an estimate of the optimization effects that we
// could get from complete loop unrolling.  It comes from the fact that some
// loads might be replaced with concrete constant values and that could trigger
// a chain of instruction simplifications.
//
// E.g. we might have:
//   int a[] = {0, 1, 0};
//   v = 0;
//   for (i = 0; i < 3; i ++)
//     v += b[i]*a[i];
// If we completely unroll the loop, we would get:
//   v = b[0]*a[0] + b[1]*a[1] + b[2]*a[2]
// Which then will be simplified to:
//   v = b[0]* 0 + b[1]* 1 + b[2]* 0
// And finally:
//   v = b[1]
class UnrollAnalyzer : public InstVisitor<UnrollAnalyzer, bool> {
  typedef InstVisitor<UnrollAnalyzer, bool> Base;
  friend class InstVisitor<UnrollAnalyzer, bool>;

  const Loop *L;
  unsigned TripCount;
  ScalarEvolution &SE;
  const TargetTransformInfo &TTI;

  DenseMap<Value *, Constant *> SimplifiedValues;
  DenseMap<LoadInst *, Value *> LoadBaseAddresses;
  SmallPtrSet<Instruction *, 32> CountedInstructions;

  /// \brief Count of the optimized instructions found so far.
  unsigned NumberOfOptimizedInstructions;

  // Provide a base case for our instruction visit.
  bool visitInstruction(Instruction &I) { return false; }
  // TODO: We should also visit ICmp, FCmp, GetElementPtr, Trunc, ZExt, SExt,
  // FPTrunc, FPExt, FPToUI, FPToSI, UIToFP, SIToFP, BitCast, Select,
  // ExtractElement, InsertElement, ShuffleVector, ExtractValue, InsertValue.
  //
  // Probably it's worth hoisting the code for estimating the simplification
  // effects to a separate class, since we have very similar code in
  // InlineCost already.
  bool visitBinaryOperator(BinaryOperator &I) {
    Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
    if (!isa<Constant>(LHS))
      if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
        LHS = SimpleLHS;
    if (!isa<Constant>(RHS))
      if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
        RHS = SimpleRHS;
    Value *SimpleV = nullptr;
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV =
          SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags());
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS);

    if (SimpleV && CountedInstructions.insert(&I).second)
      NumberOfOptimizedInstructions += TTI.getUserCost(&I);

    if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
      SimplifiedValues[&I] = C;
      return true;
    }
    return false;
  }

  Constant *computeLoadValue(LoadInst *LI, unsigned Iteration) {
    if (!LI)
      return nullptr;
    Value *BaseAddr = LoadBaseAddresses[LI];
    if (!BaseAddr)
      return nullptr;

    auto GV = dyn_cast<GlobalVariable>(BaseAddr);
    if (!GV)
      return nullptr;

    ConstantDataSequential *CDS =
        dyn_cast<ConstantDataSequential>(GV->getInitializer());
    if (!CDS)
      return nullptr;

    const SCEV *BaseAddrSE = SE.getSCEV(BaseAddr);
    const SCEV *S = SE.getSCEV(LI->getPointerOperand());
    const SCEV *OffSE = SE.getMinusSCEV(S, BaseAddrSE);

    APInt StepC, StartC;
    const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(OffSE);
    if (!AR)
      return nullptr;

    if (const SCEVConstant *StepSE =
            dyn_cast<SCEVConstant>(AR->getStepRecurrence(SE)))
      StepC = StepSE->getValue()->getValue();
    else
      return nullptr;

    if (const SCEVConstant *StartSE = dyn_cast<SCEVConstant>(AR->getStart()))
      StartC = StartSE->getValue()->getValue();
    else
      return nullptr;

    unsigned ElemSize = CDS->getElementType()->getPrimitiveSizeInBits() / 8U;
    unsigned Start = StartC.getLimitedValue();
    unsigned Step = StepC.getLimitedValue();

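    // The folded value lives at byte offset Start + Step * Iteration from the
    // start of the initializer; dividing by the element size yields the
    // element index (e.g. Start = 0, Step = 4 and 4-byte elements select
    // element number Iteration).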
    unsigned Index = (Start + Step * Iteration) / ElemSize;
    if (Index >= CDS->getNumElements())
      return nullptr;

    Constant *CV = CDS->getElementAsConstant(Index);

    return CV;
  }

public:
  UnrollAnalyzer(const Loop *L, unsigned TripCount, ScalarEvolution &SE,
                 const TargetTransformInfo &TTI)
      : L(L), TripCount(TripCount), SE(SE), TTI(TTI),
        NumberOfOptimizedInstructions(0) {}

  // Visit all loads in the loop L, and for those that, after complete loop
  // unrolling, would have a constant address pointing into a known constant
  // initializer, record the base address for future use.  This is used when
  // we estimate the number of potentially simplified instructions.
  void findConstFoldableLoads() {
    for (auto BB : L->getBlocks()) {
      for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
        if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
          if (!LI->isSimple())
            continue;
          Value *AddrOp = LI->getPointerOperand();
          const SCEV *S = SE.getSCEV(AddrOp);
          FindConstantPointers Visitor(L, SE);
          SCEVTraversal<FindConstantPointers> T(Visitor);
          T.visitAll(S);
          if (Visitor.IndexIsConstant && Visitor.LoadCanBeConstantFolded) {
            LoadBaseAddresses[LI] = Visitor.BaseAddress;
          }
        }
      }
    }
  }

  // Given a list of loads that could be constant-folded (LoadBaseAddresses),
  // estimate the number of optimized instructions after substituting the
  // concrete values for the given Iteration.
  // Also fill in the SimplifiedValues map for future use in DCE-estimation.
  unsigned estimateNumberOfSimplifiedInstructions(unsigned Iteration) {
    SmallVector<Instruction *, 8> Worklist;
    SimplifiedValues.clear();
    CountedInstructions.clear();
    NumberOfOptimizedInstructions = 0;

    // We start by folding all the constant-foldable loads and adding their
    // in-loop users to the worklist.
    for (auto &LoadDescr : LoadBaseAddresses) {
      LoadInst *LI = LoadDescr.first;
      SimplifiedValues[LI] = computeLoadValue(LI, Iteration);
      if (CountedInstructions.insert(LI).second)
        NumberOfOptimizedInstructions += TTI.getUserCost(LI);

      for (User *U : LI->users()) {
        Instruction *UI = dyn_cast<Instruction>(U);
        if (!UI)
          continue;
        if (!L->contains(UI))
          continue;
        Worklist.push_back(UI);
      }
    }

    // And then we try to simplify every user of every instruction from the
    // worklist.  If we do simplify a user, add it to the worklist to process
    // its users as well.
    while (!Worklist.empty()) {
      Instruction *I = Worklist.pop_back_val();
      if (!visit(I))
        continue;
      for (User *U : I->users()) {
        Instruction *UI = dyn_cast<Instruction>(U);
        if (!UI)
          continue;
        if (!L->contains(UI))
          continue;
        Worklist.push_back(UI);
      }
    }
    return NumberOfOptimizedInstructions;
  }

  // Given a list of potentially simplified instructions, estimate the number
  // of instructions that would become dead if we perform the simplification.
  unsigned estimateNumberOfDeadInstructions() {
    NumberOfOptimizedInstructions = 0;

    // We keep a set vector for the worklist so that we don't waste space in
    // the worklist queuing up the same instruction repeatedly.  This can
    // happen due to multiple operands being the same instruction or due to
    // the same instruction being an operand of lots of things that end up
    // dead or simplified.
    SmallSetVector<Instruction *, 8> Worklist;

    // The dead instructions are held in a separate set.  This is used to
    // prevent us from re-examining instructions and to make sure we only
    // count the benefit once.  The worklist's internal set handles insertion
    // deduplication.
    SmallPtrSet<Instruction *, 16> DeadInstructions;

    // Lambda to enqueue operands onto the worklist.
    auto EnqueueOperands = [&](Instruction &I) {
      for (auto *Op : I.operand_values())
        if (auto *OpI = dyn_cast<Instruction>(Op))
          Worklist.insert(OpI);
    };

    // Start by initializing the worklist with the simplified instructions.
    for (auto &FoldedKeyValue : SimplifiedValues)
      if (auto *FoldedInst = dyn_cast<Instruction>(FoldedKeyValue.first)) {
        DeadInstructions.insert(FoldedInst);

        // Add each instruction operand of this dead instruction to the
        // worklist.
        EnqueueOperands(*FoldedInst);
      }

    // If a definition of an instruction is only used by simplified or dead
    // instructions, it's also dead.  Check the defs of all instructions from
    // the worklist.
    while (!Worklist.empty()) {
      Instruction *I = Worklist.pop_back_val();
      if (!L->contains(I))
        continue;
      if (DeadInstructions.count(I))
        continue;
      if (I->getNumUses() == 0)
        continue;
      bool AllUsersFolded = true;
      for (User *U : I->users()) {
        Instruction *UI = dyn_cast<Instruction>(U);
        if (!SimplifiedValues[UI] && !DeadInstructions.count(UI)) {
          AllUsersFolded = false;
          break;
        }
      }
      if (AllUsersFolded) {
        NumberOfOptimizedInstructions += TTI.getUserCost(I);
        DeadInstructions.insert(I);
        EnqueueOperands(*I);
      }
    }
    return NumberOfOptimizedInstructions;
  }
};

// Complete loop unrolling can make some loads constant, and we need to know if
// that would expose any further optimization opportunities.
// This routine estimates this optimization effect and returns the number of
// instructions that potentially might be optimized away.
static unsigned
approximateNumberOfOptimizedInstructions(const Loop *L, ScalarEvolution &SE,
                                         unsigned TripCount,
                                         const TargetTransformInfo &TTI) {
  if (!TripCount || !UnrollMaxIterationsCountToAnalyze)
    return 0;

  UnrollAnalyzer UA(L, TripCount, SE, TTI);
  UA.findConstFoldableLoads();

  // Estimate the number of instructions that could be simplified if we
  // replace a load with the corresponding constant.  Since the same load will
  // take different values on different iterations, we have to go through all
  // the loop's iterations here.  To limit ourselves, we check only the first
  // N iterations, and then scale the found number, if necessary.
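  // For example, with the default cap of 1000 analyzed iterations, a loop
  // with a trip count of 4000 has the savings measured over the first 1000
  // iterations scaled up by a factor of 4000 / 1000 = 4.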
  unsigned IterationsNumberForEstimate =
      std::min<unsigned>(UnrollMaxIterationsCountToAnalyze, TripCount);
  unsigned NumberOfOptimizedInstructions = 0;
  for (unsigned i = 0; i < IterationsNumberForEstimate; ++i) {
    NumberOfOptimizedInstructions +=
        UA.estimateNumberOfSimplifiedInstructions(i);
    NumberOfOptimizedInstructions += UA.estimateNumberOfDeadInstructions();
  }
  NumberOfOptimizedInstructions *= TripCount / IterationsNumberForEstimate;

  return NumberOfOptimizedInstructions;
}

/// ApproximateLoopSize - Approximate the size of the loop.
static unsigned ApproximateLoopSize(const Loop *L, unsigned &NumCalls,
                                    bool &NotDuplicatable,
                                    const TargetTransformInfo &TTI,
                                    AssumptionCache *AC) {
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(L, AC, EphValues);

  CodeMetrics Metrics;
  for (Loop::block_iterator I = L->block_begin(), E = L->block_end();
       I != E; ++I)
    Metrics.analyzeBasicBlock(*I, TTI, EphValues);
  NumCalls = Metrics.NumInlineCandidates;
  NotDuplicatable = Metrics.notDuplicatable;

  unsigned LoopSize = Metrics.NumInsts;

  // Don't allow an estimate of size zero.  This would allow unrolling of loops
  // with huge iteration counts, which is a compile time problem even if it's
  // not a problem for code quality.  Also, the code using this size may assume
  // that each loop has at least three instructions (likely a conditional
  // branch, a comparison feeding that branch, and some kind of loop increment
  // feeding that comparison instruction).
  LoopSize = std::max(LoopSize, 3u);

  return LoopSize;
}

// Returns the loop hint metadata node with the given name (for example,
// "llvm.loop.unroll.count").  If no such metadata node exists, then nullptr is
// returned.
static MDNode *GetUnrollMetadataForLoop(const Loop *L, StringRef Name) {
  if (MDNode *LoopID = L->getLoopID())
    return GetUnrollMetadata(LoopID, Name);
  return nullptr;
}

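// The helpers below test for the loop metadata that the frontend attaches for
// "#pragma clang loop unroll(...)" and "#pragma unroll" directives
// ("llvm.loop.unroll.full", "llvm.loop.unroll.disable" and
// "llvm.loop.unroll.count").
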
// Returns true if the loop has an unroll(full) pragma.
static bool HasUnrollFullPragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.full");
}

// Returns true if the loop has an unroll(disable) pragma.
static bool HasUnrollDisablePragma(const Loop *L) {
  return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.disable");
}

// If the loop has an unroll_count pragma, return the (necessarily
// positive) value from the pragma.  Otherwise return 0.
static unsigned UnrollCountPragmaValue(const Loop *L) {
  MDNode *MD = GetUnrollMetadataForLoop(L, "llvm.loop.unroll.count");
  if (MD) {
    assert(MD->getNumOperands() == 2 &&
           "Unroll count hint metadata should have two operands.");
    unsigned Count =
        mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue();
    assert(Count >= 1 && "Unroll count must be positive.");
    return Count;
  }
  return 0;
}

// Remove existing unroll metadata and add unroll disable metadata to
// indicate the loop has already been unrolled.  This prevents a loop
// from being unrolled more than is directed by a pragma if the loop
// unrolling pass is run more than once (which it generally is).
static void SetLoopAlreadyUnrolled(Loop *L) {
  MDNode *LoopID = L->getLoopID();
  if (!LoopID) return;

  // First remove any existing loop unrolling metadata.
  SmallVector<Metadata *, 4> MDs;
  // Reserve the first location for the self reference to the LoopID metadata
  // node.
  MDs.push_back(nullptr);
  for (unsigned i = 1, ie = LoopID->getNumOperands(); i < ie; ++i) {
    bool IsUnrollMetadata = false;
    MDNode *MD = dyn_cast<MDNode>(LoopID->getOperand(i));
    if (MD) {
      const MDString *S = dyn_cast<MDString>(MD->getOperand(0));
      IsUnrollMetadata = S && S->getString().startswith("llvm.loop.unroll.");
    }
    if (!IsUnrollMetadata)
      MDs.push_back(LoopID->getOperand(i));
  }

  // Add unroll(disable) metadata to disable future unrolling.
  LLVMContext &Context = L->getHeader()->getContext();
  SmallVector<Metadata *, 1> DisableOperands;
  DisableOperands.push_back(MDString::get(Context, "llvm.loop.unroll.disable"));
  MDNode *DisableNode = MDNode::get(Context, DisableOperands);
  MDs.push_back(DisableNode);

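  // Illustratively, the rebuilt loop ID ends up looking roughly like
  //   !0 = distinct !{!0, <preserved non-unroll operands>, !1}
  //   !1 = !{!"llvm.loop.unroll.disable"}
  // (the exact printed form depends on the module).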
  MDNode *NewLoopID = MDNode::get(Context, MDs);
  // Set operand 0 to refer to the loop id itself.
  NewLoopID->replaceOperandWith(0, NewLoopID);
  L->setLoopID(NewLoopID);
}

unsigned LoopUnroll::selectUnrollCount(
    const Loop *L, unsigned TripCount, bool PragmaFullUnroll,
    unsigned PragmaCount, const TargetTransformInfo::UnrollingPreferences &UP,
    bool &SetExplicitly) {
  SetExplicitly = true;

  // A user-specified count (either as a command-line option or
  // constructor parameter) has the highest precedence.
  unsigned Count = UserCount ? CurrentCount : 0;

  // If there is no user-specified count, unroll pragmas have the next
  // highest precedence.
  if (Count == 0) {
    if (PragmaCount) {
      Count = PragmaCount;
    } else if (PragmaFullUnroll) {
      Count = TripCount;
    }
  }

  if (Count == 0)
    Count = UP.Count;

  if (Count == 0) {
    SetExplicitly = false;
    if (TripCount == 0)
      // Runtime trip count.
      Count = UnrollRuntimeCount;
    else
      // Conservative heuristic: if we know the trip count, see if we can
      // completely unroll (subject to the threshold, checked below);
      // otherwise try to find the greatest modulo of the trip count which is
      // still under the threshold value.
      Count = TripCount;
  }
  if (TripCount && Count > TripCount)
    return TripCount;
  return Count;
}

bool LoopUnroll::runOnLoop(Loop *L, LPPassManager &LPM) {
  if (skipOptnoneFunction(L))
    return false;

  Function &F = *L->getHeader()->getParent();

  LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  ScalarEvolution *SE = &getAnalysis<ScalarEvolution>();
  const TargetTransformInfo &TTI =
      getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
  auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);

  BasicBlock *Header = L->getHeader();
  DEBUG(dbgs() << "Loop Unroll: F[" << Header->getParent()->getName()
        << "] Loop %" << Header->getName() << "\n");

  if (HasUnrollDisablePragma(L)) {
    return false;
  }
  bool PragmaFullUnroll = HasUnrollFullPragma(L);
  unsigned PragmaCount = UnrollCountPragmaValue(L);
  bool HasPragma = PragmaFullUnroll || PragmaCount > 0;

  TargetTransformInfo::UnrollingPreferences UP;
  getUnrollingPreferences(L, TTI, UP);

  // Find trip count and trip multiple if count is not available.
  unsigned TripCount = 0;
  unsigned TripMultiple = 1;
  // If there are multiple exiting blocks but one of them is the latch, use the
  // latch for the trip count estimation. Otherwise insist on a single exiting
  // block for the trip count estimation.
  BasicBlock *ExitingBlock = L->getLoopLatch();
  if (!ExitingBlock || !L->isLoopExiting(ExitingBlock))
    ExitingBlock = L->getExitingBlock();
  if (ExitingBlock) {
    TripCount = SE->getSmallConstantTripCount(L, ExitingBlock);
    TripMultiple = SE->getSmallConstantTripMultiple(L, ExitingBlock);
  }

  // Select an initial unroll count.  This may be reduced later based
  // on size thresholds.
  bool CountSetExplicitly;
  unsigned Count = selectUnrollCount(L, TripCount, PragmaFullUnroll,
                                     PragmaCount, UP, CountSetExplicitly);

  unsigned NumInlineCandidates;
  bool notDuplicatable;
  unsigned LoopSize =
      ApproximateLoopSize(L, NumInlineCandidates, notDuplicatable, TTI, &AC);
  DEBUG(dbgs() << "  Loop Size = " << LoopSize << "\n");

  // When computing the unrolled size, note that the conditional branch on the
  // backedge and the comparison feeding it are not replicated like the rest of
  // the loop body (which is why 2 is subtracted).
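  // For example, a loop body of 10 instructions unrolled by a factor of 4 is
  // estimated at (10 - 2) * 4 + 2 = 34 instructions.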
  uint64_t UnrolledSize = (uint64_t)(LoopSize-2) * Count + 2;
  if (notDuplicatable) {
    DEBUG(dbgs() << "  Not unrolling loop which contains non-duplicatable"
                 << " instructions.\n");
    return false;
  }
  if (NumInlineCandidates != 0) {
    DEBUG(dbgs() << "  Not unrolling loop with inlinable calls.\n");
    return false;
  }

  unsigned NumberOfOptimizedInstructions =
      approximateNumberOfOptimizedInstructions(L, *SE, TripCount, TTI);
  DEBUG(dbgs() << "  Complete unrolling could save: "
               << NumberOfOptimizedInstructions << "\n");

  unsigned Threshold, PartialThreshold;
  selectThresholds(L, HasPragma, UP, Threshold, PartialThreshold,
                   NumberOfOptimizedInstructions);

  // Given Count, TripCount and the thresholds, determine the type of
  // unrolling which is to be performed.
  enum { Full = 0, Partial = 1, Runtime = 2 };
  int Unrolling;
  if (TripCount && Count == TripCount) {
    if (Threshold != NoThreshold && UnrolledSize > Threshold) {
      DEBUG(dbgs() << "  Too large to fully unroll with count: " << Count
                   << " because size: " << UnrolledSize << ">" << Threshold
                   << "\n");
      Unrolling = Partial;
    } else {
      Unrolling = Full;
    }
  } else if (TripCount && Count < TripCount) {
    Unrolling = Partial;
  } else {
    Unrolling = Runtime;
  }

  // Reduce count based on the type of unrolling and the threshold values.
  unsigned OriginalCount = Count;
  bool AllowRuntime = UserRuntime ? CurrentRuntime : UP.Runtime;
  if (Unrolling == Partial) {
    bool AllowPartial = UserAllowPartial ? CurrentAllowPartial : UP.Partial;
    if (!AllowPartial && !CountSetExplicitly) {
      DEBUG(dbgs() << "  will not try to unroll partially because "
                   << "-unroll-allow-partial not given\n");
      return false;
    }
    if (PartialThreshold != NoThreshold && UnrolledSize > PartialThreshold) {
      // Reduce the unroll count to a divisor of TripCount for partial
      // unrolling.
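      // For example, with the default partial threshold of 150 and a loop
      // body of 12 instructions, the computation below starts from
      // (150 - 2) / (12 - 2) = 14 and then decrements the count until it
      // evenly divides the trip count.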
      Count = (std::max(PartialThreshold, 3u)-2) / (LoopSize-2);
      while (Count != 0 && TripCount % Count != 0)
        Count--;
    }
  } else if (Unrolling == Runtime) {
    if (!AllowRuntime && !CountSetExplicitly) {
      DEBUG(dbgs() << "  will not try to unroll loop with runtime trip count "
                   << "because -unroll-runtime not given\n");
      return false;
    }
    // Reduce the unroll count to be the largest power-of-two factor of
    // the original count which satisfies the threshold limit.
    while (Count != 0 && UnrolledSize > PartialThreshold) {
      Count >>= 1;
      UnrolledSize = (LoopSize-2) * Count + 2;
    }
    if (Count > UP.MaxCount)
      Count = UP.MaxCount;
    DEBUG(dbgs() << "  partially unrolling with count: " << Count << "\n");
  }

  if (HasPragma) {
    if (PragmaCount != 0)
      // If the loop has an unroll count pragma, mark the loop as unrolled to
      // prevent unrolling beyond that requested by the pragma.
      SetLoopAlreadyUnrolled(L);

    // Emit optimization remarks if we are unable to unroll the loop
    // as directed by a pragma.
    DebugLoc LoopLoc = L->getStartLoc();
    Function *F = Header->getParent();
    LLVMContext &Ctx = F->getContext();
    if (PragmaFullUnroll && PragmaCount == 0) {
      if (TripCount && Count != TripCount) {
        emitOptimizationRemarkMissed(
            Ctx, DEBUG_TYPE, *F, LoopLoc,
            "Unable to fully unroll loop as directed by unroll(full) pragma "
            "because unrolled size is too large.");
      } else if (!TripCount) {
        emitOptimizationRemarkMissed(
            Ctx, DEBUG_TYPE, *F, LoopLoc,
            "Unable to fully unroll loop as directed by unroll(full) pragma "
            "because loop has a runtime trip count.");
      }
    } else if (PragmaCount > 0 && Count != OriginalCount) {
      emitOptimizationRemarkMissed(
          Ctx, DEBUG_TYPE, *F, LoopLoc,
          "Unable to unroll loop the number of times directed by "
          "unroll_count pragma because unrolled size is too large.");
    }
  }

  if (Unrolling != Full && Count < 2) {
    // Partial unrolling by 1 is a nop.  For full unrolling, a factor
    // of 1 makes sense because loop control can be eliminated.
    return false;
  }

  // Unroll the loop.
  if (!UnrollLoop(L, Count, TripCount, AllowRuntime, TripMultiple, LI, this,
                  &LPM, &AC))
    return false;

  return true;
}