Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 1 | //===-- LoopUnroll.cpp - Loop unroller pass -------------------------------===// |
Misha Brukman | b1c9317 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 2 | // |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
Chris Lattner | f3ebc3f | 2007-12-29 20:36:04 +0000 | [diff] [blame] | 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
Misha Brukman | b1c9317 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 7 | // |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This pass implements a simple loop unroller. It works best when loops have |
| 11 | // been canonicalized by the -indvars pass, allowing it to determine the trip |
| 12 | // counts of loops easily. |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 13 | //===----------------------------------------------------------------------===// |
| 14 | |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 15 | #include "llvm/Transforms/Scalar.h" |
Chandler Carruth | 3b057b3 | 2015-02-13 03:57:40 +0000 | [diff] [blame] | 16 | #include "llvm/ADT/SetVector.h" |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 17 | #include "llvm/Analysis/AssumptionCache.h" |
Chris Lattner | 679572e | 2011-01-02 07:35:53 +0000 | [diff] [blame] | 18 | #include "llvm/Analysis/CodeMetrics.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 19 | #include "llvm/Analysis/InstructionSimplify.h" |
Chandler Carruth | ed0881b | 2012-12-03 16:50:05 +0000 | [diff] [blame] | 20 | #include "llvm/Analysis/LoopPass.h" |
Dan Gohman | 0141c13 | 2010-07-26 18:11:16 +0000 | [diff] [blame] | 21 | #include "llvm/Analysis/ScalarEvolution.h" |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 22 | #include "llvm/Analysis/ScalarEvolutionExpressions.h" |
Chandler Carruth | bb9caa9 | 2013-01-21 13:04:33 +0000 | [diff] [blame] | 23 | #include "llvm/Analysis/TargetTransformInfo.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 24 | #include "llvm/IR/DataLayout.h" |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 25 | #include "llvm/IR/DiagnosticInfo.h" |
Chandler Carruth | 5ad5f15 | 2014-01-13 09:26:24 +0000 | [diff] [blame] | 26 | #include "llvm/IR/Dominators.h" |
Benjamin Kramer | 799003b | 2015-03-23 19:32:43 +0000 | [diff] [blame] | 27 | #include "llvm/IR/InstVisitor.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 28 | #include "llvm/IR/IntrinsicInst.h" |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 29 | #include "llvm/IR/Metadata.h" |
Reid Spencer | 7c16caa | 2004-09-01 22:55:40 +0000 | [diff] [blame] | 30 | #include "llvm/Support/CommandLine.h" |
| 31 | #include "llvm/Support/Debug.h" |
Daniel Dunbar | 0dd5e1e | 2009-07-25 00:23:56 +0000 | [diff] [blame] | 32 | #include "llvm/Support/raw_ostream.h" |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 33 | #include "llvm/Transforms/Utils/UnrollLoop.h" |
Duncan Sands | 67933e6 | 2008-05-16 09:30:00 +0000 | [diff] [blame] | 34 | #include <climits> |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 35 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 36 | using namespace llvm; |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 37 | |
Chandler Carruth | 964daaa | 2014-04-22 02:55:47 +0000 | [diff] [blame] | 38 | #define DEBUG_TYPE "loop-unroll" |
| 39 | |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 40 | static cl::opt<unsigned> |
Owen Anderson | d85c9cc | 2010-09-10 17:57:00 +0000 | [diff] [blame] | 41 | UnrollThreshold("unroll-threshold", cl::init(150), cl::Hidden, |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 42 | cl::desc("The cut-off point for automatic loop unrolling")); |
| 43 | |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 44 | static cl::opt<unsigned> UnrollMaxIterationsCountToAnalyze( |
Chandler Carruth | 1fbc316 | 2015-02-13 05:31:46 +0000 | [diff] [blame] | 45 | "unroll-max-iteration-count-to-analyze", cl::init(0), cl::Hidden, |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 46 | cl::desc("Don't allow loop unrolling to simulate more than this number of" |
| 47 | "iterations when checking full unroll profitability")); |
| 48 | |
Michael Zolotukhin | 7af83c1 | 2015-02-06 20:20:40 +0000 | [diff] [blame] | 49 | static cl::opt<unsigned> UnrollMinPercentOfOptimized( |
| 50 | "unroll-percent-of-optimized-for-complete-unroll", cl::init(20), cl::Hidden, |
| 51 | cl::desc("If complete unrolling could trigger further optimizations, and, " |
| 52 | "by that, remove the given percent of instructions, perform the " |
| 53 | "complete unroll even if it's beyond the threshold")); |
| 54 | |
| 55 | static cl::opt<unsigned> UnrollAbsoluteThreshold( |
| 56 | "unroll-absolute-threshold", cl::init(2000), cl::Hidden, |
| 57 | cl::desc("Don't unroll if the unrolled size is bigger than this threshold," |
| 58 | " even if we can remove big portion of instructions later.")); |
| 59 | |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 60 | static cl::opt<unsigned> |
| 61 | UnrollCount("unroll-count", cl::init(0), cl::Hidden, |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 62 | cl::desc("Use this unroll count for all loops including those with " |
| 63 | "unroll_count pragma values, for testing purposes")); |
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 64 | |
Matthijs Kooijman | 98b5c16 | 2008-07-29 13:21:23 +0000 | [diff] [blame] | 65 | static cl::opt<bool> |
| 66 | UnrollAllowPartial("unroll-allow-partial", cl::init(false), cl::Hidden, |
| 67 | cl::desc("Allows loops to be partially unrolled until " |
| 68 | "-unroll-threshold loop size is reached.")); |
| 69 | |
Andrew Trick | d04d1529 | 2011-12-09 06:19:40 +0000 | [diff] [blame] | 70 | static cl::opt<bool> |
| 71 | UnrollRuntime("unroll-runtime", cl::ZeroOrMore, cl::init(false), cl::Hidden, |
| 72 | cl::desc("Unroll loops with run-time trip counts")); |
| 73 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 74 | static cl::opt<unsigned> |
| 75 | PragmaUnrollThreshold("pragma-unroll-threshold", cl::init(16 * 1024), cl::Hidden, |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 76 | cl::desc("Unrolled size limit for loops with an unroll(full) or " |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 77 | "unroll_count pragma.")); |
| 78 | |
namespace {
/// The loop unroller pass. Decides, per loop, whether and how far to unroll,
/// then delegates the actual transformation to UnrollLoop() from
/// Transforms/Utils. Thresholds come from three sources, in decreasing
/// priority: explicit constructor arguments, command-line flags, and
/// target-provided UnrollingPreferences.
class LoopUnroll : public LoopPass {
public:
  static char ID; // Pass ID, replacement for typeid

  /// \param T size threshold, \param C unroll count, \param P allow partial
  /// unrolling, \param R allow runtime unrolling. A value of -1 means "not
  /// specified": fall back to the corresponding command-line option.
  LoopUnroll(int T = -1, int C = -1, int P = -1, int R = -1) : LoopPass(ID) {
    CurrentThreshold = (T == -1) ? UnrollThreshold : unsigned(T);
    CurrentAbsoluteThreshold = UnrollAbsoluteThreshold;
    CurrentMinPercentOfOptimized = UnrollMinPercentOfOptimized;
    CurrentCount = (C == -1) ? UnrollCount : unsigned(C);
    CurrentAllowPartial = (P == -1) ? UnrollAllowPartial : (bool)P;
    CurrentRuntime = (R == -1) ? UnrollRuntime : (bool)R;

    // Record which settings were explicitly chosen (ctor arg or flag), so
    // that later we know whether TTI preferences may override them.
    UserThreshold = (T != -1) || (UnrollThreshold.getNumOccurrences() > 0);
    UserAbsoluteThreshold = (UnrollAbsoluteThreshold.getNumOccurrences() > 0);
    UserPercentOfOptimized =
        (UnrollMinPercentOfOptimized.getNumOccurrences() > 0);
    UserAllowPartial = (P != -1) ||
                       (UnrollAllowPartial.getNumOccurrences() > 0);
    UserRuntime = (R != -1) || (UnrollRuntime.getNumOccurrences() > 0);
    UserCount = (C != -1) || (UnrollCount.getNumOccurrences() > 0);

    initializeLoopUnrollPass(*PassRegistry::getPassRegistry());
  }

  /// A magic value for use with the Threshold parameter to indicate
  /// that the loop unroll should be performed regardless of how much
  /// code expansion would result.
  static const unsigned NoThreshold = UINT_MAX;

  // Threshold to use when optsize is specified (and there is no
  // explicit -unroll-threshold).
  static const unsigned OptSizeUnrollThreshold = 50;

  // Default unroll count for loops with run-time trip count if
  // -unroll-count is not set
  static const unsigned UnrollRuntimeCount = 8;

  unsigned CurrentCount;
  unsigned CurrentThreshold;
  unsigned CurrentAbsoluteThreshold;
  unsigned CurrentMinPercentOfOptimized;
  bool CurrentAllowPartial;
  bool CurrentRuntime;
  bool UserCount;            // CurrentCount is user-specified.
  bool UserThreshold;        // CurrentThreshold is user-specified.
  bool UserAbsoluteThreshold;   // CurrentAbsoluteThreshold is
                                // user-specified.
  bool UserPercentOfOptimized;  // CurrentMinPercentOfOptimized is
                                // user-specified.
  bool UserAllowPartial;     // CurrentAllowPartial is user-specified.
  bool UserRuntime;          // CurrentRuntime is user-specified.

  bool runOnLoop(Loop *L, LPPassManager &LPM) override;

  /// This transformation requires natural loop information & requires that
  /// loop preheaders be inserted into the CFG...
  ///
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addRequired<LoopInfoWrapperPass>();
    AU.addPreserved<LoopInfoWrapperPass>();
    AU.addRequiredID(LoopSimplifyID);
    AU.addPreservedID(LoopSimplifyID);
    AU.addRequiredID(LCSSAID);
    AU.addPreservedID(LCSSAID);
    AU.addRequired<ScalarEvolution>();
    AU.addPreserved<ScalarEvolution>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    // FIXME: Loop unroll requires LCSSA. And LCSSA requires dom info.
    // If loop unroll does not preserve dom info then LCSSA pass on next
    // loop will receive invalid dom info.
    // For now, recreate dom info, if loop is unrolled.
    AU.addPreserved<DominatorTreeWrapperPass>();
  }

  // Fill in the UnrollingPreferences parameter with values from the
  // TargetTransformationInfo. Our current settings are used as the
  // starting point; TTI.getUnrollingPreferences may then adjust them.
  void getUnrollingPreferences(Loop *L, const TargetTransformInfo &TTI,
                               TargetTransformInfo::UnrollingPreferences &UP) {
    UP.Threshold = CurrentThreshold;
    UP.AbsoluteThreshold = CurrentAbsoluteThreshold;
    UP.MinPercentOfOptimized = CurrentMinPercentOfOptimized;
    UP.OptSizeThreshold = OptSizeUnrollThreshold;
    UP.PartialThreshold = CurrentThreshold;
    UP.PartialOptSizeThreshold = OptSizeUnrollThreshold;
    UP.Count = CurrentCount;
    UP.MaxCount = UINT_MAX;
    UP.Partial = CurrentAllowPartial;
    UP.Runtime = CurrentRuntime;
    UP.AllowExpensiveTripCount = false;
    TTI.getUnrollingPreferences(L, UP);
  }

  // Select and return an unroll count based on parameters from
  // user, unroll preferences, unroll pragmas, or a heuristic.
  // SetExplicitly is set to true if the unroll count is set by
  // the user or a pragma rather than selected heuristically.
  unsigned
  selectUnrollCount(const Loop *L, unsigned TripCount, bool PragmaFullUnroll,
                    unsigned PragmaCount,
                    const TargetTransformInfo::UnrollingPreferences &UP,
                    bool &SetExplicitly);

  // Select threshold values used to limit unrolling based on a
  // total unrolled size. Parameters Threshold and PartialThreshold
  // are set to the maximum unrolled size for fully and partially
  // unrolled loops respectively.
  void selectThresholds(const Loop *L, bool HasPragma,
                        const TargetTransformInfo::UnrollingPreferences &UP,
                        unsigned &Threshold, unsigned &PartialThreshold,
                        unsigned &AbsoluteThreshold,
                        unsigned &PercentOfOptimizedForCompleteUnroll) {
    // Determine the current unrolling threshold. While this is
    // normally set from UnrollThreshold, it is overridden to a
    // smaller value if the current function is marked as
    // optimize-for-size, and the unroll threshold was not user
    // specified.
    Threshold = UserThreshold ? CurrentThreshold : UP.Threshold;
    PartialThreshold = UserThreshold ? CurrentThreshold : UP.PartialThreshold;
    AbsoluteThreshold = UserAbsoluteThreshold ? CurrentAbsoluteThreshold
                                              : UP.AbsoluteThreshold;
    PercentOfOptimizedForCompleteUnroll = UserPercentOfOptimized
                                              ? CurrentMinPercentOfOptimized
                                              : UP.MinPercentOfOptimized;

    if (!UserThreshold &&
        L->getHeader()->getParent()->hasFnAttribute(
            Attribute::OptimizeForSize)) {
      Threshold = UP.OptSizeThreshold;
      PartialThreshold = UP.PartialOptSizeThreshold;
    }
    if (HasPragma) {
      // If the loop has an unrolling pragma, we want to be more
      // aggressive with unrolling limits. Set thresholds to at
      // least the PragmaThreshold value which is larger than the
      // default limits.
      if (Threshold != NoThreshold)
        Threshold = std::max<unsigned>(Threshold, PragmaUnrollThreshold);
      if (PartialThreshold != NoThreshold)
        PartialThreshold =
            std::max<unsigned>(PartialThreshold, PragmaUnrollThreshold);
    }
  }

  /// Decide whether complete unrolling pays off, given the estimated
  /// UnrolledSize and the number of instructions expected to be optimized
  /// away afterwards.
  bool canUnrollCompletely(Loop *L, unsigned Threshold,
                           unsigned AbsoluteThreshold, uint64_t UnrolledSize,
                           unsigned NumberOfOptimizedInstructions,
                           unsigned PercentOfOptimizedForCompleteUnroll);
};
}
| 228 | |
char LoopUnroll::ID = 0;
// Register the pass and declare the analyses it depends on, so the pass
// manager can schedule them before loop-unroll runs.
INITIALIZE_PASS_BEGIN(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopSimplify)
INITIALIZE_PASS_DEPENDENCY(LCSSA)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolution)
INITIALIZE_PASS_END(LoopUnroll, "loop-unroll", "Unroll loops", false, false)
Dan Gohman | d78c400 | 2008-05-13 00:00:25 +0000 | [diff] [blame] | 238 | |
/// Public factory for the loop unroller. Any parameter left at -1 falls back
/// to the corresponding command-line option (see the LoopUnroll constructor).
Pass *llvm::createLoopUnrollPass(int Threshold, int Count, int AllowPartial,
                                 int Runtime) {
  return new LoopUnroll(Threshold, Count, AllowPartial, Runtime);
}
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 243 | |
Hal Finkel | 86b3064 | 2014-03-31 23:23:51 +0000 | [diff] [blame] | 244 | Pass *llvm::createSimpleLoopUnrollPass() { |
| 245 | return llvm::createLoopUnrollPass(-1, -1, 0, 0); |
| 246 | } |
| 247 | |
Benjamin Kramer | 51f6096c | 2015-03-23 12:30:58 +0000 | [diff] [blame] | 248 | namespace { |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 249 | /// \brief SCEV expressions visitor used for finding expressions that would |
| 250 | /// become constants if the loop L is unrolled. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 251 | struct FindConstantPointers { |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 252 | /// \brief Shows whether the expression is ConstAddress+Constant or not. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 253 | bool IndexIsConstant; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 254 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 255 | /// \brief Used for filtering out SCEV expressions with two or more AddRec |
| 256 | /// subexpressions. |
| 257 | /// |
| 258 | /// Used to filter out complicated SCEV expressions, having several AddRec |
| 259 | /// sub-expressions. We don't handle them, because unrolling one loop |
| 260 | /// would help to replace only one of these inductions with a constant, and |
| 261 | /// consequently, the expression would remain non-constant. |
| 262 | bool HaveSeenAR; |
| 263 | |
| 264 | /// \brief If the SCEV expression becomes ConstAddress+Constant, this value |
| 265 | /// holds ConstAddress. Otherwise, it's nullptr. |
| 266 | Value *BaseAddress; |
| 267 | |
| 268 | /// \brief The loop, which we try to completely unroll. |
| 269 | const Loop *L; |
| 270 | |
| 271 | ScalarEvolution &SE; |
| 272 | |
| 273 | FindConstantPointers(const Loop *L, ScalarEvolution &SE) |
| 274 | : IndexIsConstant(true), HaveSeenAR(false), BaseAddress(nullptr), |
| 275 | L(L), SE(SE) {} |
| 276 | |
| 277 | /// Examine the given expression S and figure out, if it can be a part of an |
| 278 | /// expression, that could become a constant after the loop is unrolled. |
| 279 | /// The routine sets IndexIsConstant and HaveSeenAR according to the analysis |
| 280 | /// results. |
| 281 | /// \returns true if we need to examine subexpressions, and false otherwise. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 282 | bool follow(const SCEV *S) { |
| 283 | if (const SCEVUnknown *SC = dyn_cast<SCEVUnknown>(S)) { |
| 284 | // We've reached the leaf node of SCEV, it's most probably just a |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 285 | // variable. |
| 286 | // If it's the only one SCEV-subexpression, then it might be a base |
| 287 | // address of an index expression. |
| 288 | // If we've already recorded base address, then just give up on this SCEV |
| 289 | // - it's too complicated. |
| 290 | if (BaseAddress) { |
| 291 | IndexIsConstant = false; |
| 292 | return false; |
| 293 | } |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 294 | BaseAddress = SC->getValue(); |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 295 | return false; |
| 296 | } |
| 297 | if (isa<SCEVConstant>(S)) |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 298 | return false; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 299 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) { |
| 300 | // If the current SCEV expression is AddRec, and its loop isn't the loop |
| 301 | // we are about to unroll, then we won't get a constant address after |
| 302 | // unrolling, and thus, won't be able to eliminate the load. |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 303 | if (AR->getLoop() != L) { |
| 304 | IndexIsConstant = false; |
| 305 | return false; |
| 306 | } |
| 307 | // We don't handle multiple AddRecs here, so give up in this case. |
| 308 | if (HaveSeenAR) { |
| 309 | IndexIsConstant = false; |
| 310 | return false; |
| 311 | } |
| 312 | HaveSeenAR = true; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 313 | } |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 314 | |
| 315 | // Continue traversal. |
| 316 | return true; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 317 | } |
| 318 | bool isDone() const { return !IndexIsConstant; } |
| 319 | }; |
| 320 | |
| 321 | // This class is used to get an estimate of the optimization effects that we |
| 322 | // could get from complete loop unrolling. It comes from the fact that some |
| 323 | // loads might be replaced with concrete constant values and that could trigger |
| 324 | // a chain of instruction simplifications. |
| 325 | // |
| 326 | // E.g. we might have: |
| 327 | // int a[] = {0, 1, 0}; |
| 328 | // v = 0; |
| 329 | // for (i = 0; i < 3; i ++) |
| 330 | // v += b[i]*a[i]; |
| 331 | // If we completely unroll the loop, we would get: |
| 332 | // v = b[0]*a[0] + b[1]*a[1] + b[2]*a[2] |
| 333 | // Which then will be simplified to: |
| 334 | // v = b[0]* 0 + b[1]* 1 + b[2]* 0 |
| 335 | // And finally: |
| 336 | // v = b[1] |
| 337 | class UnrollAnalyzer : public InstVisitor<UnrollAnalyzer, bool> { |
| 338 | typedef InstVisitor<UnrollAnalyzer, bool> Base; |
| 339 | friend class InstVisitor<UnrollAnalyzer, bool>; |
| 340 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 341 | struct SCEVGEPDescriptor { |
| 342 | Value *BaseAddr; |
Chandler Carruth | a6ae877 | 2015-05-12 23:32:56 +0000 | [diff] [blame^] | 343 | unsigned Start; |
| 344 | unsigned Step; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 345 | }; |
| 346 | |
| 347 | /// \brief The loop we're going to analyze. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 348 | const Loop *L; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 349 | |
| 350 | /// \brief TripCount of the given loop. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 351 | unsigned TripCount; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 352 | |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 353 | ScalarEvolution &SE; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 354 | |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 355 | const TargetTransformInfo &TTI; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 356 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 357 | // While we walk the loop instructions, we we build up and maintain a mapping |
| 358 | // of simplified values specific to this iteration. The idea is to propagate |
| 359 | // any special information we have about loads that can be replaced with |
| 360 | // constants after complete unrolling, and account for likely simplifications |
| 361 | // post-unrolling. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 362 | DenseMap<Value *, Constant *> SimplifiedValues; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 363 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 364 | // To avoid requesting SCEV info on every iteration, request it once, and |
| 365 | // for each value that would become ConstAddress+Constant after loop |
| 366 | // unrolling, save the corresponding data. |
| 367 | SmallDenseMap<Value *, SCEVGEPDescriptor> SCEVCache; |
Chandler Carruth | 302a133 | 2015-02-13 02:10:56 +0000 | [diff] [blame] | 368 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 369 | /// \brief Number of currently simulated iteration. |
| 370 | /// |
| 371 | /// If an expression is ConstAddress+Constant, then the Constant is |
| 372 | /// Start + Iteration*Step, where Start and Step could be obtained from |
| 373 | /// SCEVCache. |
| 374 | unsigned Iteration; |
| 375 | |
| 376 | /// \brief Upper threshold for complete unrolling. |
| 377 | unsigned MaxUnrolledLoopSize; |
| 378 | |
| 379 | /// Base case for the instruction visitor. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 380 | bool visitInstruction(Instruction &I) { return false; }; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 381 | |
| 382 | /// TODO: Add visitors for other instruction types, e.g. ZExt, SExt. |
| 383 | |
| 384 | /// Try to simplify binary operator I. |
| 385 | /// |
| 386 | /// TODO: Probaly it's worth to hoist the code for estimating the |
| 387 | /// simplifications effects to a separate class, since we have a very similar |
| 388 | /// code in InlineCost already. |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 389 | bool visitBinaryOperator(BinaryOperator &I) { |
| 390 | Value *LHS = I.getOperand(0), *RHS = I.getOperand(1); |
| 391 | if (!isa<Constant>(LHS)) |
| 392 | if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS)) |
| 393 | LHS = SimpleLHS; |
| 394 | if (!isa<Constant>(RHS)) |
| 395 | if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS)) |
| 396 | RHS = SimpleRHS; |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 397 | Value *SimpleV = nullptr; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 398 | const DataLayout &DL = I.getModule()->getDataLayout(); |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 399 | if (auto FI = dyn_cast<FPMathOperator>(&I)) |
| 400 | SimpleV = |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 401 | SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL); |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 402 | else |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 403 | SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL); |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 404 | |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 405 | if (SimpleV) |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 406 | NumberOfOptimizedInstructions += TTI.getUserCost(&I); |
| 407 | |
| 408 | if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) { |
| 409 | SimplifiedValues[&I] = C; |
| 410 | return true; |
| 411 | } |
| 412 | return false; |
| 413 | } |
| 414 | |
  /// Try to fold load I.
  ///
  /// Returns true (after recording the result in SimplifiedValues and
  /// crediting the load's cost to NumberOfOptimizedInstructions) when the
  /// load's address resolves, on the current simulated Iteration, to a known
  /// element of a constant global initializer. Returns false in every case
  /// where the load cannot be proven to produce a constant.
  bool visitLoad(LoadInst &I) {
    Value *AddrOp = I.getPointerOperand();
    // Prefer an address already simplified on this iteration, if we have one.
    if (!isa<Constant>(AddrOp))
      if (Constant *SimplifiedAddrOp = SimplifiedValues.lookup(AddrOp))
        AddrOp = SimplifiedAddrOp;

    // Only addresses that cacheSCEVResults() modeled as
    // Base + Start + Step*Iteration can be folded.
    auto It = SCEVCache.find(AddrOp);
    if (It == SCEVCache.end())
      return false;
    SCEVGEPDescriptor GEPDesc = It->second;

    auto GV = dyn_cast<GlobalVariable>(GEPDesc.BaseAddr);
    // We're only interested in loads that can be completely folded to a
    // constant.
    if (!GV || !GV->hasInitializer())
      return false;

    // Conservatively handle only simple constant arrays
    // (ConstantDataSequential); any other initializer kind is skipped.
    ConstantDataSequential *CDS =
        dyn_cast<ConstantDataSequential>(GV->getInitializer());
    if (!CDS)
      return false;

    // This calculation should never overflow because we bound Iteration quite
    // low and both the start and step are 32-bit integers. We use signed
    // integers so that UBSan will catch if a bug sneaks into the code.
    int ElemSize = CDS->getElementType()->getPrimitiveSizeInBits() / 8U;
    int64_t Index = ((int64_t)GEPDesc.Start +
                     (int64_t)GEPDesc.Step * (int64_t)Iteration) /
                    ElemSize;
    if (Index >= CDS->getNumElements()) {
      // FIXME: For now we conservatively ignore out of bound accesses, but
      // we're allowed to perform the optimization in this case.
      return false;
    }

    Constant *CV = CDS->getElementAsConstant(Index);
    assert(CV && "Constant expected.");
    SimplifiedValues[&I] = CV;

    NumberOfOptimizedInstructions += TTI.getUserCost(&I);
    return true;
  }
| 458 | |
  /// Visit all GEPs in the loop and find those which after complete loop
  /// unrolling would become a constant, or BaseAddress+Constant.
  ///
  /// Such GEPs could allow to evaluate a load to a constant later - for now we
  /// just store the corresponding BaseAddress and StartValue with StepValue in
  /// the SCEVCache.
  void cacheSCEVResults() {
    for (auto BB : L->getBlocks()) {
      for (Instruction &I : *BB) {
        if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
          Value *V = cast<Value>(GEP);
          if (!SE.isSCEVable(V->getType()))
            continue;
          const SCEV *S = SE.getSCEV(V);
          // FIXME: Hoist the initialization out of the loop.
          FindConstantPointers Visitor(L, SE);
          SCEVTraversal<FindConstantPointers> T(Visitor);
          // Try to find (BaseAddress+Step+Offset) tuple.
          // If succeeded, save it to the cache - it might help in folding
          // loads.
          T.visitAll(S);
          if (!Visitor.IndexIsConstant || !Visitor.BaseAddress)
            continue;

          const SCEV *BaseAddrSE = SE.getSCEV(Visitor.BaseAddress);
          if (BaseAddrSE->getType() != S->getType())
            continue;
          // Express the GEP as BaseAddress + Offset and require the offset
          // to be an affine recurrence (Start + Step*Iteration) with constant
          // start and step.
          const SCEV *OffSE = SE.getMinusSCEV(S, BaseAddrSE);
          const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(OffSE);

          if (!AR)
            continue;

          const SCEVConstant *StepSE =
              dyn_cast<SCEVConstant>(AR->getStepRecurrence(SE));
          const SCEVConstant *StartSE = dyn_cast<SCEVConstant>(AR->getStart());
          if (!StepSE || !StartSE)
            continue;

          // Check and skip caching if doing so would require lots of bits to
          // avoid overflow.
          APInt Start = StartSE->getValue()->getValue();
          APInt Step = StepSE->getValue()->getValue();
          if (Start.getActiveBits() > 32 || Step.getActiveBits() > 32)
            continue;

          // We found a cacheable SCEV model for the GEP.
          SCEVCache[V] = {Visitor.BaseAddress,
                          (unsigned)Start.getLimitedValue(),
                          (unsigned)Step.getLimitedValue()};
        }
      }
    }
  }
| 513 | |
public:
  /// Construct an analyzer for completely unrolling \p L, which must have the
  /// known trip count \p TripCount. The analysis bails out as soon as the
  /// estimated unrolled size, net of instructions expected to be optimized
  /// away, exceeds \p MaxUnrolledLoopSize.
  UnrollAnalyzer(const Loop *L, unsigned TripCount, ScalarEvolution &SE,
                 const TargetTransformInfo &TTI, unsigned MaxUnrolledLoopSize)
      : L(L), TripCount(TripCount), SE(SE), TTI(TTI),
        MaxUnrolledLoopSize(MaxUnrolledLoopSize),
        NumberOfOptimizedInstructions(0), UnrolledLoopSize(0) {}

  /// \brief Count the number of optimized instructions.
  unsigned NumberOfOptimizedInstructions;

  /// \brief Count the total number of instructions.
  unsigned UnrolledLoopSize;
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 526 | |
  /// \brief Figure out if the loop is worth full unrolling.
  ///
  /// Complete loop unrolling can make some loads constant, and we need to know
  /// if that would expose any further optimization opportunities. This routine
  /// estimates this optimization. It assigns computed number of instructions,
  /// that potentially might be optimized away, to
  /// NumberOfOptimizedInstructions, and total number of instructions to
  /// UnrolledLoopSize (not counting blocks that won't be reached, if we were
  /// able to compute the condition).
  /// \returns false if we can't analyze the loop, or if we discovered that
  /// unrolling won't give anything. Otherwise, returns true.
  bool analyzeLoop() {
    SmallSetVector<BasicBlock *, 16> BBWorklist;

    // We want to be able to scale offsets by the trip count and add more
    // offsets to them without checking for overflows, and we already don't want
    // to analyze *massive* trip counts, so we force the max to be reasonably
    // small.
    assert(UnrollMaxIterationsCountToAnalyze < (INT_MAX / 2) &&
           "The unroll iterations max is too large!");

    // Don't simulate loops with a big or unknown tripcount
    if (!UnrollMaxIterationsCountToAnalyze || !TripCount ||
        TripCount > UnrollMaxIterationsCountToAnalyze)
      return false;

    // To avoid compute SCEV-expressions on every iteration, compute them once
    // and store interesting to us in SCEVCache.
    cacheSCEVResults();

    // Simulate execution of each iteration of the loop counting instructions,
    // which would be simplified.
    // Since the same load will take different values on different iterations,
    // we literally have to go through all loop's iterations.
    for (Iteration = 0; Iteration < TripCount; ++Iteration) {
      // Simplifications are per-iteration: a load may fold to a different
      // constant on each iteration, so the map is rebuilt from scratch.
      SimplifiedValues.clear();
      BBWorklist.clear();
      BBWorklist.insert(L->getHeader());
      // Note that we *must not* cache the size, this loop grows the worklist.
      for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
        BasicBlock *BB = BBWorklist[Idx];

        // Visit all instructions in the given basic block and try to simplify
        // it. We don't change the actual IR, just count optimization
        // opportunities.
        for (Instruction &I : *BB) {
          UnrolledLoopSize += TTI.getUserCost(&I);
          Base::visit(I);
          // If unrolled body turns out to be too big, bail out.
          if (UnrolledLoopSize - NumberOfOptimizedInstructions >
              MaxUnrolledLoopSize)
            return false;
        }

        // Add BB's successors to the worklist.
        for (BasicBlock *Succ : successors(BB))
          if (L->contains(Succ))
            BBWorklist.insert(Succ);
      }

      // If we found no optimization opportunities on the first iteration, we
      // won't find them on later ones too.
      if (!NumberOfOptimizedInstructions)
        return false;
    }
    return true;
  }
| 594 | }; |
Benjamin Kramer | 51f6096c | 2015-03-23 12:30:58 +0000 | [diff] [blame] | 595 | } // namespace |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 596 | |
Dan Gohman | 49d08a5 | 2007-05-08 15:14:19 +0000 | [diff] [blame] | 597 | /// ApproximateLoopSize - Approximate the size of the loop. |
Andrew Trick | f765601 | 2011-10-01 01:39:05 +0000 | [diff] [blame] | 598 | static unsigned ApproximateLoopSize(const Loop *L, unsigned &NumCalls, |
Chandler Carruth | bb9caa9 | 2013-01-21 13:04:33 +0000 | [diff] [blame] | 599 | bool &NotDuplicatable, |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 600 | const TargetTransformInfo &TTI, |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 601 | AssumptionCache *AC) { |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 602 | SmallPtrSet<const Value *, 32> EphValues; |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 603 | CodeMetrics::collectEphemeralValues(L, AC, EphValues); |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 604 | |
Dan Gohman | 969e83a | 2009-10-31 14:54:17 +0000 | [diff] [blame] | 605 | CodeMetrics Metrics; |
Dan Gohman | 9007107 | 2008-06-22 20:18:58 +0000 | [diff] [blame] | 606 | for (Loop::block_iterator I = L->block_begin(), E = L->block_end(); |
Dan Gohman | 969e83a | 2009-10-31 14:54:17 +0000 | [diff] [blame] | 607 | I != E; ++I) |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 608 | Metrics.analyzeBasicBlock(*I, TTI, EphValues); |
Owen Anderson | 04cf3fd | 2010-09-09 20:32:23 +0000 | [diff] [blame] | 609 | NumCalls = Metrics.NumInlineCandidates; |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 610 | NotDuplicatable = Metrics.notDuplicatable; |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 611 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 612 | unsigned LoopSize = Metrics.NumInsts; |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 613 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 614 | // Don't allow an estimate of size zero. This would allows unrolling of loops |
| 615 | // with huge iteration counts, which is a compile time problem even if it's |
Hal Finkel | 38dd590 | 2015-01-10 00:30:55 +0000 | [diff] [blame] | 616 | // not a problem for code quality. Also, the code using this size may assume |
| 617 | // that each loop has at least three instructions (likely a conditional |
| 618 | // branch, a comparison feeding that branch, and some kind of loop increment |
| 619 | // feeding that comparison instruction). |
| 620 | LoopSize = std::max(LoopSize, 3u); |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 621 | |
Owen Anderson | 62ea1b7 | 2010-09-09 19:07:31 +0000 | [diff] [blame] | 622 | return LoopSize; |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 623 | } |
| 624 | |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 625 | // Returns the loop hint metadata node with the given name (for example, |
| 626 | // "llvm.loop.unroll.count"). If no such metadata node exists, then nullptr is |
| 627 | // returned. |
Jingyue Wu | 49a766e | 2015-02-02 20:41:11 +0000 | [diff] [blame] | 628 | static MDNode *GetUnrollMetadataForLoop(const Loop *L, StringRef Name) { |
| 629 | if (MDNode *LoopID = L->getLoopID()) |
| 630 | return GetUnrollMetadata(LoopID, Name); |
| 631 | return nullptr; |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 632 | } |
| 633 | |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 634 | // Returns true if the loop has an unroll(full) pragma. |
| 635 | static bool HasUnrollFullPragma(const Loop *L) { |
Jingyue Wu | 0220df0 | 2015-02-01 02:27:45 +0000 | [diff] [blame] | 636 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.full"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 637 | } |
| 638 | |
| 639 | // Returns true if the loop has an unroll(disable) pragma. |
| 640 | static bool HasUnrollDisablePragma(const Loop *L) { |
Jingyue Wu | 0220df0 | 2015-02-01 02:27:45 +0000 | [diff] [blame] | 641 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.disable"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 642 | } |
| 643 | |
Kevin Qin | 715b01e | 2015-03-09 06:14:18 +0000 | [diff] [blame] | 644 | // Returns true if the loop has an runtime unroll(disable) pragma. |
| 645 | static bool HasRuntimeUnrollDisablePragma(const Loop *L) { |
| 646 | return GetUnrollMetadataForLoop(L, "llvm.loop.unroll.runtime.disable"); |
| 647 | } |
| 648 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 649 | // If loop has an unroll_count pragma return the (necessarily |
| 650 | // positive) value from the pragma. Otherwise return 0. |
| 651 | static unsigned UnrollCountPragmaValue(const Loop *L) { |
Jingyue Wu | 49a766e | 2015-02-02 20:41:11 +0000 | [diff] [blame] | 652 | MDNode *MD = GetUnrollMetadataForLoop(L, "llvm.loop.unroll.count"); |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 653 | if (MD) { |
| 654 | assert(MD->getNumOperands() == 2 && |
| 655 | "Unroll count hint metadata should have two operands."); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 656 | unsigned Count = |
| 657 | mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue(); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 658 | assert(Count >= 1 && "Unroll count must be positive."); |
| 659 | return Count; |
| 660 | } |
| 661 | return 0; |
| 662 | } |
| 663 | |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 664 | // Remove existing unroll metadata and add unroll disable metadata to |
| 665 | // indicate the loop has already been unrolled. This prevents a loop |
| 666 | // from being unrolled more than is directed by a pragma if the loop |
| 667 | // unrolling pass is run more than once (which it generally is). |
| 668 | static void SetLoopAlreadyUnrolled(Loop *L) { |
| 669 | MDNode *LoopID = L->getLoopID(); |
| 670 | if (!LoopID) return; |
| 671 | |
| 672 | // First remove any existing loop unrolling metadata. |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 673 | SmallVector<Metadata *, 4> MDs; |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 674 | // Reserve first location for self reference to the LoopID metadata node. |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 675 | MDs.push_back(nullptr); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 676 | for (unsigned i = 1, ie = LoopID->getNumOperands(); i < ie; ++i) { |
| 677 | bool IsUnrollMetadata = false; |
| 678 | MDNode *MD = dyn_cast<MDNode>(LoopID->getOperand(i)); |
| 679 | if (MD) { |
| 680 | const MDString *S = dyn_cast<MDString>(MD->getOperand(0)); |
| 681 | IsUnrollMetadata = S && S->getString().startswith("llvm.loop.unroll."); |
| 682 | } |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 683 | if (!IsUnrollMetadata) |
| 684 | MDs.push_back(LoopID->getOperand(i)); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 685 | } |
| 686 | |
| 687 | // Add unroll(disable) metadata to disable future unrolling. |
| 688 | LLVMContext &Context = L->getHeader()->getContext(); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 689 | SmallVector<Metadata *, 1> DisableOperands; |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 690 | DisableOperands.push_back(MDString::get(Context, "llvm.loop.unroll.disable")); |
Mark Heffernan | f3764da | 2014-07-18 21:29:41 +0000 | [diff] [blame] | 691 | MDNode *DisableNode = MDNode::get(Context, DisableOperands); |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 692 | MDs.push_back(DisableNode); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 693 | |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 694 | MDNode *NewLoopID = MDNode::get(Context, MDs); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 695 | // Set operand 0 to refer to the loop id itself. |
| 696 | NewLoopID->replaceOperandWith(0, NewLoopID); |
| 697 | L->setLoopID(NewLoopID); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 698 | } |
| 699 | |
/// Decide whether completely unrolling \p L is acceptable.
///
/// Returns true when any one of the following holds:
///  - no threshold is in force (Threshold == NoThreshold);
///  - the estimated unrolled size fits under \p Threshold;
///  - the unrolled size fits under \p AbsoluteThreshold AND at least
///    \p PercentOfOptimizedForCompleteUnroll percent of the unrolled
///    instructions are expected to be optimized away.
/// Otherwise returns false (with DEBUG output explaining the rejection).
bool LoopUnroll::canUnrollCompletely(
    Loop *L, unsigned Threshold, unsigned AbsoluteThreshold,
    uint64_t UnrolledSize, unsigned NumberOfOptimizedInstructions,
    unsigned PercentOfOptimizedForCompleteUnroll) {

  if (Threshold == NoThreshold) {
    DEBUG(dbgs() << "  Can fully unroll, because no threshold is set.\n");
    return true;
  }

  if (UnrolledSize <= Threshold) {
    DEBUG(dbgs() << "  Can fully unroll, because unrolled size: "
                 << UnrolledSize << "<" << Threshold << "\n");
    return true;
  }

  // UnrolledSize > Threshold >= 0 here, so dividing by it below is safe.
  assert(UnrolledSize && "UnrolledSize can't be 0 at this point.");
  unsigned PercentOfOptimizedInstructions =
      (uint64_t)NumberOfOptimizedInstructions * 100ull / UnrolledSize;

  if (UnrolledSize <= AbsoluteThreshold &&
      PercentOfOptimizedInstructions >= PercentOfOptimizedForCompleteUnroll) {
    DEBUG(dbgs() << "  Can fully unroll, because unrolling will help removing "
                 << PercentOfOptimizedInstructions
                 << "% instructions (threshold: "
                 << PercentOfOptimizedForCompleteUnroll << "%)\n");
    DEBUG(dbgs() << "  Unrolled size (" << UnrolledSize
                 << ") is less than the threshold (" << AbsoluteThreshold
                 << ").\n");
    return true;
  }

  DEBUG(dbgs() << "  Too large to fully unroll:\n");
  DEBUG(dbgs() << "    Unrolled size: " << UnrolledSize << "\n");
  DEBUG(dbgs() << "    Estimated number of optimized instructions: "
               << NumberOfOptimizedInstructions << "\n");
  DEBUG(dbgs() << "    Absolute threshold: " << AbsoluteThreshold << "\n");
  DEBUG(dbgs() << "    Minimum percent of removed instructions: "
               << PercentOfOptimizedForCompleteUnroll << "\n");
  DEBUG(dbgs() << "    Threshold for small loops: " << Threshold << "\n");
  return false;
}
| 742 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 743 | unsigned LoopUnroll::selectUnrollCount( |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 744 | const Loop *L, unsigned TripCount, bool PragmaFullUnroll, |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 745 | unsigned PragmaCount, const TargetTransformInfo::UnrollingPreferences &UP, |
| 746 | bool &SetExplicitly) { |
| 747 | SetExplicitly = true; |
| 748 | |
| 749 | // User-specified count (either as a command-line option or |
| 750 | // constructor parameter) has highest precedence. |
| 751 | unsigned Count = UserCount ? CurrentCount : 0; |
| 752 | |
| 753 | // If there is no user-specified count, unroll pragmas have the next |
| 754 | // highest precendence. |
| 755 | if (Count == 0) { |
| 756 | if (PragmaCount) { |
| 757 | Count = PragmaCount; |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 758 | } else if (PragmaFullUnroll) { |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 759 | Count = TripCount; |
| 760 | } |
| 761 | } |
| 762 | |
| 763 | if (Count == 0) |
| 764 | Count = UP.Count; |
| 765 | |
| 766 | if (Count == 0) { |
| 767 | SetExplicitly = false; |
| 768 | if (TripCount == 0) |
| 769 | // Runtime trip count. |
| 770 | Count = UnrollRuntimeCount; |
| 771 | else |
| 772 | // Conservative heuristic: if we know the trip count, see if we can |
| 773 | // completely unroll (subject to the threshold, checked below); otherwise |
| 774 | // try to find greatest modulo of the trip count which is still under |
| 775 | // threshold value. |
| 776 | Count = TripCount; |
| 777 | } |
| 778 | if (TripCount && Count > TripCount) |
| 779 | return TripCount; |
| 780 | return Count; |
| 781 | } |
| 782 | |
Devang Patel | 9779e56 | 2007-03-07 01:38:05 +0000 | [diff] [blame] | 783 | bool LoopUnroll::runOnLoop(Loop *L, LPPassManager &LPM) { |
Paul Robinson | af4e64d | 2014-02-06 00:07:05 +0000 | [diff] [blame] | 784 | if (skipOptnoneFunction(L)) |
| 785 | return false; |
| 786 | |
Chandler Carruth | fdb9c57 | 2015-02-01 12:01:35 +0000 | [diff] [blame] | 787 | Function &F = *L->getHeader()->getParent(); |
| 788 | |
Chandler Carruth | 4f8f307 | 2015-01-17 14:16:18 +0000 | [diff] [blame] | 789 | LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo(); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 790 | ScalarEvolution *SE = &getAnalysis<ScalarEvolution>(); |
Chandler Carruth | 705b185 | 2015-01-31 03:43:40 +0000 | [diff] [blame] | 791 | const TargetTransformInfo &TTI = |
Chandler Carruth | fdb9c57 | 2015-02-01 12:01:35 +0000 | [diff] [blame] | 792 | getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F); |
Chandler Carruth | fdb9c57 | 2015-02-01 12:01:35 +0000 | [diff] [blame] | 793 | auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F); |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 794 | |
Dan Gohman | 2e1f804 | 2007-05-08 15:19:19 +0000 | [diff] [blame] | 795 | BasicBlock *Header = L->getHeader(); |
David Greene | e0b9789 | 2010-01-05 01:27:44 +0000 | [diff] [blame] | 796 | DEBUG(dbgs() << "Loop Unroll: F[" << Header->getParent()->getName() |
Daniel Dunbar | 0dd5e1e | 2009-07-25 00:23:56 +0000 | [diff] [blame] | 797 | << "] Loop %" << Header->getName() << "\n"); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 798 | |
| 799 | if (HasUnrollDisablePragma(L)) { |
| 800 | return false; |
| 801 | } |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 802 | bool PragmaFullUnroll = HasUnrollFullPragma(L); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 803 | unsigned PragmaCount = UnrollCountPragmaValue(L); |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 804 | bool HasPragma = PragmaFullUnroll || PragmaCount > 0; |
Andrew Trick | 279e7a6 | 2011-07-23 00:29:16 +0000 | [diff] [blame] | 805 | |
Hal Finkel | 8f2e700 | 2013-09-11 19:25:43 +0000 | [diff] [blame] | 806 | TargetTransformInfo::UnrollingPreferences UP; |
Chandler Carruth | 21fc195 | 2015-02-01 14:37:03 +0000 | [diff] [blame] | 807 | getUnrollingPreferences(L, TTI, UP); |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 808 | |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 809 | // Find trip count and trip multiple if count is not available |
| 810 | unsigned TripCount = 0; |
Andrew Trick | 1cabe54 | 2011-07-23 00:33:05 +0000 | [diff] [blame] | 811 | unsigned TripMultiple = 1; |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 812 | // If there are multiple exiting blocks but one of them is the latch, use the |
| 813 | // latch for the trip count estimation. Otherwise insist on a single exiting |
| 814 | // block for the trip count estimation. |
| 815 | BasicBlock *ExitingBlock = L->getLoopLatch(); |
| 816 | if (!ExitingBlock || !L->isLoopExiting(ExitingBlock)) |
| 817 | ExitingBlock = L->getExitingBlock(); |
| 818 | if (ExitingBlock) { |
| 819 | TripCount = SE->getSmallConstantTripCount(L, ExitingBlock); |
| 820 | TripMultiple = SE->getSmallConstantTripMultiple(L, ExitingBlock); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 821 | } |
Hal Finkel | 8f2e700 | 2013-09-11 19:25:43 +0000 | [diff] [blame] | 822 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 823 | // Select an initial unroll count. This may be reduced later based |
| 824 | // on size thresholds. |
| 825 | bool CountSetExplicitly; |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 826 | unsigned Count = selectUnrollCount(L, TripCount, PragmaFullUnroll, |
| 827 | PragmaCount, UP, CountSetExplicitly); |
Eli Bendersky | dc6de2c | 2014-06-12 18:05:39 +0000 | [diff] [blame] | 828 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 829 | unsigned NumInlineCandidates; |
| 830 | bool notDuplicatable; |
| 831 | unsigned LoopSize = |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 832 | ApproximateLoopSize(L, NumInlineCandidates, notDuplicatable, TTI, &AC); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 833 | DEBUG(dbgs() << " Loop Size = " << LoopSize << "\n"); |
Hal Finkel | 38dd590 | 2015-01-10 00:30:55 +0000 | [diff] [blame] | 834 | |
| 835 | // When computing the unrolled size, note that the conditional branch on the |
| 836 | // backedge and the comparison feeding it are not replicated like the rest of |
| 837 | // the loop body (which is why 2 is subtracted). |
| 838 | uint64_t UnrolledSize = (uint64_t)(LoopSize-2) * Count + 2; |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 839 | if (notDuplicatable) { |
| 840 | DEBUG(dbgs() << " Not unrolling loop which contains non-duplicatable" |
| 841 | << " instructions.\n"); |
| 842 | return false; |
| 843 | } |
| 844 | if (NumInlineCandidates != 0) { |
| 845 | DEBUG(dbgs() << " Not unrolling loop with inlinable calls.\n"); |
| 846 | return false; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 847 | } |
| 848 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 849 | unsigned Threshold, PartialThreshold; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 850 | unsigned AbsoluteThreshold, PercentOfOptimizedForCompleteUnroll; |
Michael Zolotukhin | a9aadd2 | 2015-02-05 02:34:00 +0000 | [diff] [blame] | 851 | selectThresholds(L, HasPragma, UP, Threshold, PartialThreshold, |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 852 | AbsoluteThreshold, PercentOfOptimizedForCompleteUnroll); |
Benjamin Kramer | 9130cb8 | 2014-05-04 19:12:38 +0000 | [diff] [blame] | 853 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 854 | // Given Count, TripCount and thresholds determine the type of |
| 855 | // unrolling which is to be performed. |
| 856 | enum { Full = 0, Partial = 1, Runtime = 2 }; |
| 857 | int Unrolling; |
| 858 | if (TripCount && Count == TripCount) { |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 859 | Unrolling = Partial; |
| 860 | // If the loop is really small, we don't need to run an expensive analysis. |
| 861 | if (canUnrollCompletely( |
| 862 | L, Threshold, AbsoluteThreshold, |
| 863 | UnrolledSize, 0, 100)) { |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 864 | Unrolling = Full; |
Michael Zolotukhin | 8c68171 | 2015-05-12 17:20:03 +0000 | [diff] [blame] | 865 | } else { |
| 866 | // The loop isn't that small, but we still can fully unroll it if that |
| 867 | // helps to remove a significant number of instructions. |
| 868 | // To check that, run additional analysis on the loop. |
| 869 | UnrollAnalyzer UA(L, TripCount, *SE, TTI, AbsoluteThreshold); |
| 870 | if (UA.analyzeLoop() && |
| 871 | canUnrollCompletely(L, Threshold, AbsoluteThreshold, |
| 872 | UA.UnrolledLoopSize, |
| 873 | UA.NumberOfOptimizedInstructions, |
| 874 | PercentOfOptimizedForCompleteUnroll)) { |
| 875 | Unrolling = Full; |
| 876 | } |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 877 | } |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 878 | } else if (TripCount && Count < TripCount) { |
| 879 | Unrolling = Partial; |
| 880 | } else { |
| 881 | Unrolling = Runtime; |
| 882 | } |
| 883 | |
| 884 | // Reduce count based on the type of unrolling and the threshold values. |
| 885 | unsigned OriginalCount = Count; |
| 886 | bool AllowRuntime = UserRuntime ? CurrentRuntime : UP.Runtime; |
Kevin Qin | 715b01e | 2015-03-09 06:14:18 +0000 | [diff] [blame] | 887 | if (HasRuntimeUnrollDisablePragma(L)) { |
| 888 | AllowRuntime = false; |
| 889 | } |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 890 | if (Unrolling == Partial) { |
| 891 | bool AllowPartial = UserAllowPartial ? CurrentAllowPartial : UP.Partial; |
| 892 | if (!AllowPartial && !CountSetExplicitly) { |
| 893 | DEBUG(dbgs() << " will not try to unroll partially because " |
| 894 | << "-unroll-allow-partial not given\n"); |
| 895 | return false; |
| 896 | } |
| 897 | if (PartialThreshold != NoThreshold && UnrolledSize > PartialThreshold) { |
| 898 | // Reduce unroll count to be modulo of TripCount for partial unrolling. |
Hal Finkel | 38dd590 | 2015-01-10 00:30:55 +0000 | [diff] [blame] | 899 | Count = (std::max(PartialThreshold, 3u)-2) / (LoopSize-2); |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 900 | while (Count != 0 && TripCount % Count != 0) |
| 901 | Count--; |
| 902 | } |
| 903 | } else if (Unrolling == Runtime) { |
| 904 | if (!AllowRuntime && !CountSetExplicitly) { |
| 905 | DEBUG(dbgs() << " will not try to unroll loop with runtime trip count " |
| 906 | << "-unroll-runtime not given\n"); |
| 907 | return false; |
| 908 | } |
| 909 | // Reduce unroll count to be the largest power-of-two factor of |
| 910 | // the original count which satisfies the threshold limit. |
| 911 | while (Count != 0 && UnrolledSize > PartialThreshold) { |
| 912 | Count >>= 1; |
Hal Finkel | 38dd590 | 2015-01-10 00:30:55 +0000 | [diff] [blame] | 913 | UnrolledSize = (LoopSize-2) * Count + 2; |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 914 | } |
| 915 | if (Count > UP.MaxCount) |
| 916 | Count = UP.MaxCount; |
| 917 | DEBUG(dbgs() << " partially unrolling with count: " << Count << "\n"); |
| 918 | } |
| 919 | |
| 920 | if (HasPragma) { |
Mark Heffernan | 9e11244 | 2014-07-23 20:05:44 +0000 | [diff] [blame] | 921 | if (PragmaCount != 0) |
| 922 | // If loop has an unroll count pragma mark loop as unrolled to prevent |
| 923 | // unrolling beyond that requested by the pragma. |
| 924 | SetLoopAlreadyUnrolled(L); |
Mark Heffernan | 053a686 | 2014-07-18 21:04:33 +0000 | [diff] [blame] | 925 | |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 926 | // Emit optimization remarks if we are unable to unroll the loop |
| 927 | // as directed by a pragma. |
| 928 | DebugLoc LoopLoc = L->getStartLoc(); |
| 929 | Function *F = Header->getParent(); |
| 930 | LLVMContext &Ctx = F->getContext(); |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 931 | if (PragmaFullUnroll && PragmaCount == 0) { |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 932 | if (TripCount && Count != TripCount) { |
| 933 | emitOptimizationRemarkMissed( |
| 934 | Ctx, DEBUG_TYPE, *F, LoopLoc, |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 935 | "Unable to fully unroll loop as directed by unroll(full) pragma " |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 936 | "because unrolled size is too large."); |
| 937 | } else if (!TripCount) { |
| 938 | emitOptimizationRemarkMissed( |
| 939 | Ctx, DEBUG_TYPE, *F, LoopLoc, |
Mark Heffernan | e6b4ba1 | 2014-07-23 17:31:37 +0000 | [diff] [blame] | 940 | "Unable to fully unroll loop as directed by unroll(full) pragma " |
Eli Bendersky | ff90324 | 2014-06-16 23:53:02 +0000 | [diff] [blame] | 941 | "because loop has a runtime trip count."); |
| 942 | } |
| 943 | } else if (PragmaCount > 0 && Count != OriginalCount) { |
| 944 | emitOptimizationRemarkMissed( |
| 945 | Ctx, DEBUG_TYPE, *F, LoopLoc, |
| 946 | "Unable to unroll loop the number of times directed by " |
| 947 | "unroll_count pragma because unrolled size is too large."); |
| 948 | } |
| 949 | } |
| 950 | |
| 951 | if (Unrolling != Full && Count < 2) { |
| 952 | // Partial unrolling by 1 is a nop. For full unrolling, a factor |
| 953 | // of 1 makes sense because loop control can be eliminated. |
| 954 | return false; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 955 | } |
| 956 | |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 957 | // Unroll the loop. |
Sanjoy Das | e178f46 | 2015-04-14 03:20:38 +0000 | [diff] [blame] | 958 | if (!UnrollLoop(L, Count, TripCount, AllowRuntime, UP.AllowExpensiveTripCount, |
| 959 | TripMultiple, LI, this, &LPM, &AC)) |
Dan Gohman | 3dc2d92 | 2008-05-14 00:24:14 +0000 | [diff] [blame] | 960 | return false; |
Dan Gohman | 2980d9d | 2007-05-11 20:53:41 +0000 | [diff] [blame] | 961 | |
Chris Lattner | 946b255 | 2004-04-18 05:20:17 +0000 | [diff] [blame] | 962 | return true; |
| 963 | } |