//===- ScalarEvolution.cpp - Scalar Evolution Analysis --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation of the scalar evolution analysis
// engine, which is used primarily to analyze expressions involving induction
// variables in loops.
//
// There are several aspects to this library. First is the representation of
// scalar expressions, which are represented as subclasses of the SCEV class.
// These classes are used to represent certain types of subexpressions that we
// can handle. We only create one SCEV of a particular shape, so
// pointer-comparisons for equality are legal.
//
// One important aspect of the SCEV objects is that they are never cyclic, even
// if there is a cycle in the dataflow for an expression (i.e., a PHI node). If
// the PHI node is one of the idioms that we can represent (e.g., a polynomial
// recurrence) then we represent it directly as a recurrence node, otherwise we
// represent it as a SCEVUnknown node.
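//
// For example, a PHI node that starts at 0 and is incremented by 1 on every
// iteration of some loop %loop is a simple polynomial recurrence; in the
// notation this file prints, it is the recurrence {0,+,1}<%loop> (an
// illustrative case, with %loop standing in for the loop's header block).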
//
// In addition to being able to represent expressions of various types, we also
// have folders that are used to build the *canonical* representation for a
// particular expression. These folders are capable of using a variety of
// rewrite rules to simplify the expressions.
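//
// For instance, the add-expression folder constant-folds an expression such
// as (1 + 2) into 3, and operands are sorted by complexity so that, e.g.,
// (a + b) and (b + a) do not end up as different expressions (an illustrative
// sketch; see getAddExpr and GroupByComplexity below for the actual rules).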
//
// Once the folders are defined, we can implement the more interesting
// higher-level code, such as the code that recognizes PHI nodes of various
// types, computes the execution count of a loop, etc.
//
// TODO: We should use these routines and value representations to implement
// dependence analysis!
//
//===----------------------------------------------------------------------===//
//
// There are several good references for the techniques used in this analysis.
//
//  Chains of recurrences -- a method to expedite the evaluation
//  of closed-form functions
//  Olaf Bachmann, Paul S. Wang, Eugene V. Zima
//
//  On computational properties of chains of recurrences
//  Eugene V. Zima
//
//  Symbolic Evaluation of Chains of Recurrences for Loop Optimization
//  Robert A. van Engelen
//
//  Efficient Symbolic Analysis for Optimizing Compilers
//  Robert A. van Engelen
//
//  Using the chains of recurrences algebra for data dependence testing and
//  induction variable substitution
//  MS Thesis, Johnie Birch
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "scalar-evolution"

STATISTIC(NumArrayLenItCounts,
          "Number of trip counts computed with array length");
STATISTIC(NumTripCountsComputed,
          "Number of loops with predictable loop counts");
STATISTIC(NumTripCountsNotComputed,
          "Number of loops without predictable loop counts");
STATISTIC(NumBruteForceTripCountsComputed,
          "Number of loops with trip counts computed by force");

static cl::opt<unsigned>
MaxBruteForceIterations("scalar-evolution-max-iterations", cl::ReallyHidden,
                        cl::desc("Maximum number of iterations SCEV will "
                                 "symbolically execute a constant "
                                 "derived loop"),
                        cl::init(100));

// FIXME: Enable this with XDEBUG when the test suite is clean.
static cl::opt<bool>
VerifySCEV("verify-scev",
           cl::desc("Verify ScalarEvolution's backedge taken counts (slow)"));

//===----------------------------------------------------------------------===//
//                           SCEV class definitions
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Implementation of the SCEV class.
//

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void SCEV::dump() const {
  print(dbgs());
  dbgs() << '\n';
}
#endif

void SCEV::print(raw_ostream &OS) const {
  switch (static_cast<SCEVTypes>(getSCEVType())) {
  case scConstant:
    cast<SCEVConstant>(this)->getValue()->printAsOperand(OS, false);
    return;
  case scTruncate: {
    const SCEVTruncateExpr *Trunc = cast<SCEVTruncateExpr>(this);
    const SCEV *Op = Trunc->getOperand();
    OS << "(trunc " << *Op->getType() << " " << *Op << " to "
       << *Trunc->getType() << ")";
    return;
  }
  case scZeroExtend: {
    const SCEVZeroExtendExpr *ZExt = cast<SCEVZeroExtendExpr>(this);
    const SCEV *Op = ZExt->getOperand();
    OS << "(zext " << *Op->getType() << " " << *Op << " to "
       << *ZExt->getType() << ")";
    return;
  }
  case scSignExtend: {
    const SCEVSignExtendExpr *SExt = cast<SCEVSignExtendExpr>(this);
    const SCEV *Op = SExt->getOperand();
    OS << "(sext " << *Op->getType() << " " << *Op << " to "
       << *SExt->getType() << ")";
    return;
  }
  case scAddRecExpr: {
    const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(this);
    OS << "{" << *AR->getOperand(0);
    for (unsigned i = 1, e = AR->getNumOperands(); i != e; ++i)
      OS << ",+," << *AR->getOperand(i);
    OS << "}<";
    if (AR->getNoWrapFlags(FlagNUW))
      OS << "nuw><";
    if (AR->getNoWrapFlags(FlagNSW))
      OS << "nsw><";
    if (AR->getNoWrapFlags(FlagNW) &&
        !AR->getNoWrapFlags((NoWrapFlags)(FlagNUW | FlagNSW)))
      OS << "nw><";
    AR->getLoop()->getHeader()->printAsOperand(OS, /*PrintType=*/false);
    OS << ">";
    return;
  }
  case scAddExpr:
  case scMulExpr:
  case scUMaxExpr:
  case scSMaxExpr: {
    const SCEVNAryExpr *NAry = cast<SCEVNAryExpr>(this);
    const char *OpStr = nullptr;
    switch (NAry->getSCEVType()) {
    case scAddExpr: OpStr = " + "; break;
    case scMulExpr: OpStr = " * "; break;
    case scUMaxExpr: OpStr = " umax "; break;
    case scSMaxExpr: OpStr = " smax "; break;
    }
    OS << "(";
    for (SCEVNAryExpr::op_iterator I = NAry->op_begin(), E = NAry->op_end();
         I != E; ++I) {
      OS << **I;
      if (std::next(I) != E)
        OS << OpStr;
    }
    OS << ")";
    switch (NAry->getSCEVType()) {
    case scAddExpr:
    case scMulExpr:
      if (NAry->getNoWrapFlags(FlagNUW))
        OS << "<nuw>";
      if (NAry->getNoWrapFlags(FlagNSW))
        OS << "<nsw>";
    }
    return;
  }
  case scUDivExpr: {
    const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(this);
    OS << "(" << *UDiv->getLHS() << " /u " << *UDiv->getRHS() << ")";
    return;
  }
  case scUnknown: {
    const SCEVUnknown *U = cast<SCEVUnknown>(this);
    Type *AllocTy;
    if (U->isSizeOf(AllocTy)) {
      OS << "sizeof(" << *AllocTy << ")";
      return;
    }
    if (U->isAlignOf(AllocTy)) {
      OS << "alignof(" << *AllocTy << ")";
      return;
    }

    Type *CTy;
    Constant *FieldNo;
    if (U->isOffsetOf(CTy, FieldNo)) {
      OS << "offsetof(" << *CTy << ", ";
      FieldNo->printAsOperand(OS, false);
      OS << ")";
      return;
    }

    // Otherwise just print it normally.
    U->getValue()->printAsOperand(OS, false);
    return;
  }
  case scCouldNotCompute:
    OS << "***COULDNOTCOMPUTE***";
    return;
  }
  llvm_unreachable("Unknown SCEV kind!");
}

Type *SCEV::getType() const {
  switch (static_cast<SCEVTypes>(getSCEVType())) {
  case scConstant:
    return cast<SCEVConstant>(this)->getType();
  case scTruncate:
  case scZeroExtend:
  case scSignExtend:
    return cast<SCEVCastExpr>(this)->getType();
  case scAddRecExpr:
  case scMulExpr:
  case scUMaxExpr:
  case scSMaxExpr:
    return cast<SCEVNAryExpr>(this)->getType();
  case scAddExpr:
    return cast<SCEVAddExpr>(this)->getType();
  case scUDivExpr:
    return cast<SCEVUDivExpr>(this)->getType();
  case scUnknown:
    return cast<SCEVUnknown>(this)->getType();
  case scCouldNotCompute:
    llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
  }
  llvm_unreachable("Unknown SCEV kind!");
}

bool SCEV::isZero() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isZero();
  return false;
}

bool SCEV::isOne() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isOne();
  return false;
}

bool SCEV::isAllOnesValue() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isAllOnesValue();
  return false;
}

/// isNonConstantNegative - Return true if the specified scev is negated, but
/// not a constant.
bool SCEV::isNonConstantNegative() const {
  const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(this);
  if (!Mul) return false;

  // If there is a constant factor, it will be first.
  const SCEVConstant *SC = dyn_cast<SCEVConstant>(Mul->getOperand(0));
  if (!SC) return false;

  // Return true if the value is negative; this matches things like (-42 * V).
  return SC->getValue()->getValue().isNegative();
}

SCEVCouldNotCompute::SCEVCouldNotCompute() :
  SCEV(FoldingSetNodeIDRef(), scCouldNotCompute) {}

bool SCEVCouldNotCompute::classof(const SCEV *S) {
  return S->getSCEVType() == scCouldNotCompute;
}

const SCEV *ScalarEvolution::getConstant(ConstantInt *V) {
  FoldingSetNodeID ID;
  ID.AddInteger(scConstant);
  ID.AddPointer(V);
  void *IP = nullptr;
  if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S;
  SCEV *S = new (SCEVAllocator) SCEVConstant(ID.Intern(SCEVAllocator), V);
  UniqueSCEVs.InsertNode(S, IP);
  return S;
}

const SCEV *ScalarEvolution::getConstant(const APInt &Val) {
  return getConstant(ConstantInt::get(getContext(), Val));
}

const SCEV *
ScalarEvolution::getConstant(Type *Ty, uint64_t V, bool isSigned) {
  IntegerType *ITy = cast<IntegerType>(getEffectiveSCEVType(Ty));
  return getConstant(ConstantInt::get(ITy, V, isSigned));
}

SCEVCastExpr::SCEVCastExpr(const FoldingSetNodeIDRef ID,
                           unsigned SCEVTy, const SCEV *op, Type *ty)
  : SCEV(ID, SCEVTy), Op(op), Ty(ty) {}

SCEVTruncateExpr::SCEVTruncateExpr(const FoldingSetNodeIDRef ID,
                                   const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scTruncate, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot truncate non-integer value!");
}

SCEVZeroExtendExpr::SCEVZeroExtendExpr(const FoldingSetNodeIDRef ID,
                                       const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scZeroExtend, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot zero extend non-integer value!");
}

SCEVSignExtendExpr::SCEVSignExtendExpr(const FoldingSetNodeIDRef ID,
                                       const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scSignExtend, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot sign extend non-integer value!");
}

void SCEVUnknown::deleted() {
  // Clear this SCEVUnknown from various maps.
  SE->forgetMemoizedResults(this);

  // Remove this SCEVUnknown from the uniquing map.
  SE->UniqueSCEVs.RemoveNode(this);

  // Release the value.
  setValPtr(nullptr);
}

void SCEVUnknown::allUsesReplacedWith(Value *New) {
  // Clear this SCEVUnknown from various maps.
  SE->forgetMemoizedResults(this);

  // Remove this SCEVUnknown from the uniquing map.
  SE->UniqueSCEVs.RemoveNode(this);

  // Update this SCEVUnknown to point to the new value. This is needed
  // because there may still be outstanding SCEVs which still point to
  // this SCEVUnknown.
  setValPtr(New);
}

bool SCEVUnknown::isSizeOf(Type *&AllocTy) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getOperand(0)->isNullValue() &&
            CE->getNumOperands() == 2)
          if (ConstantInt *CI = dyn_cast<ConstantInt>(CE->getOperand(1)))
            if (CI->isOne()) {
              AllocTy = cast<PointerType>(CE->getOperand(0)->getType())
                            ->getElementType();
              return true;
            }

  return false;
}

bool SCEVUnknown::isAlignOf(Type *&AllocTy) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getOperand(0)->isNullValue()) {
          Type *Ty =
            cast<PointerType>(CE->getOperand(0)->getType())->getElementType();
          if (StructType *STy = dyn_cast<StructType>(Ty))
            if (!STy->isPacked() &&
                CE->getNumOperands() == 3 &&
                CE->getOperand(1)->isNullValue()) {
              if (ConstantInt *CI = dyn_cast<ConstantInt>(CE->getOperand(2)))
                if (CI->isOne() &&
                    STy->getNumElements() == 2 &&
                    STy->getElementType(0)->isIntegerTy(1)) {
                  AllocTy = STy->getElementType(1);
                  return true;
                }
            }
        }

  return false;
}

bool SCEVUnknown::isOffsetOf(Type *&CTy, Constant *&FieldNo) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getNumOperands() == 3 &&
            CE->getOperand(0)->isNullValue() &&
            CE->getOperand(1)->isNullValue()) {
          Type *Ty =
            cast<PointerType>(CE->getOperand(0)->getType())->getElementType();
          // Ignore vector types here so that ScalarEvolutionExpander doesn't
          // emit getelementptrs that index into vectors.
          if (Ty->isStructTy() || Ty->isArrayTy()) {
            CTy = Ty;
            FieldNo = CE->getOperand(2);
            return true;
          }
        }

  return false;
}

//===----------------------------------------------------------------------===//
//                               SCEV Utilities
//===----------------------------------------------------------------------===//

namespace {
/// SCEVComplexityCompare - Return true if the complexity of the LHS is less
/// than the complexity of the RHS. This comparator is used to canonicalize
/// expressions.
class SCEVComplexityCompare {
  const LoopInfo *const LI;
public:
  explicit SCEVComplexityCompare(const LoopInfo *li) : LI(li) {}

  // Return true if LHS is less complex than RHS, and false otherwise.
  bool operator()(const SCEV *LHS, const SCEV *RHS) const {
    return compare(LHS, RHS) < 0;
  }

  // Return negative, zero, or positive, if LHS is less than, equal to, or
  // greater than RHS, respectively. A three-way result allows recursive
  // comparisons to be more efficient.
  int compare(const SCEV *LHS, const SCEV *RHS) const {
    // Fast-path: SCEVs are uniqued so we can do a quick equality check.
    if (LHS == RHS)
      return 0;

    // Primarily, sort the SCEVs by their getSCEVType().
    unsigned LType = LHS->getSCEVType(), RType = RHS->getSCEVType();
    if (LType != RType)
      return (int)LType - (int)RType;

    // Aside from the getSCEVType() ordering, the particular ordering
    // isn't very important except that it's beneficial to be consistent,
    // so that (a + b) and (b + a) don't end up as different expressions.
    switch (static_cast<SCEVTypes>(LType)) {
    case scUnknown: {
      const SCEVUnknown *LU = cast<SCEVUnknown>(LHS);
      const SCEVUnknown *RU = cast<SCEVUnknown>(RHS);

      // Sort SCEVUnknown values with some loose heuristics. TODO: This is
      // not as complete as it could be.
      const Value *LV = LU->getValue(), *RV = RU->getValue();

      // Order pointer values after integer values. This helps SCEVExpander
      // form GEPs.
      bool LIsPointer = LV->getType()->isPointerTy(),
           RIsPointer = RV->getType()->isPointerTy();
      if (LIsPointer != RIsPointer)
        return (int)LIsPointer - (int)RIsPointer;

      // Compare getValueID values.
      unsigned LID = LV->getValueID(),
               RID = RV->getValueID();
      if (LID != RID)
        return (int)LID - (int)RID;

      // Sort arguments by their position.
      if (const Argument *LA = dyn_cast<Argument>(LV)) {
        const Argument *RA = cast<Argument>(RV);
        unsigned LArgNo = LA->getArgNo(), RArgNo = RA->getArgNo();
        return (int)LArgNo - (int)RArgNo;
      }

      // For instructions, compare their loop depth, and their operand
      // count. This is pretty loose.
      if (const Instruction *LInst = dyn_cast<Instruction>(LV)) {
        const Instruction *RInst = cast<Instruction>(RV);

        // Compare loop depths.
        const BasicBlock *LParent = LInst->getParent(),
                         *RParent = RInst->getParent();
        if (LParent != RParent) {
          unsigned LDepth = LI->getLoopDepth(LParent),
                   RDepth = LI->getLoopDepth(RParent);
          if (LDepth != RDepth)
            return (int)LDepth - (int)RDepth;
        }

        // Compare the number of operands.
        unsigned LNumOps = LInst->getNumOperands(),
                 RNumOps = RInst->getNumOperands();
        return (int)LNumOps - (int)RNumOps;
      }

      return 0;
    }

    case scConstant: {
      const SCEVConstant *LC = cast<SCEVConstant>(LHS);
      const SCEVConstant *RC = cast<SCEVConstant>(RHS);

      // Compare constant values.
      const APInt &LA = LC->getValue()->getValue();
      const APInt &RA = RC->getValue()->getValue();
      unsigned LBitWidth = LA.getBitWidth(), RBitWidth = RA.getBitWidth();
      if (LBitWidth != RBitWidth)
        return (int)LBitWidth - (int)RBitWidth;
      return LA.ult(RA) ? -1 : 1;
    }

    case scAddRecExpr: {
      const SCEVAddRecExpr *LA = cast<SCEVAddRecExpr>(LHS);
      const SCEVAddRecExpr *RA = cast<SCEVAddRecExpr>(RHS);

      // Compare addrec loop depths.
      const Loop *LLoop = LA->getLoop(), *RLoop = RA->getLoop();
      if (LLoop != RLoop) {
        unsigned LDepth = LLoop->getLoopDepth(),
                 RDepth = RLoop->getLoopDepth();
        if (LDepth != RDepth)
          return (int)LDepth - (int)RDepth;
      }

      // Addrec complexity grows with operand count.
      unsigned LNumOps = LA->getNumOperands(), RNumOps = RA->getNumOperands();
      if (LNumOps != RNumOps)
        return (int)LNumOps - (int)RNumOps;

      // Lexicographically compare.
      for (unsigned i = 0; i != LNumOps; ++i) {
        long X = compare(LA->getOperand(i), RA->getOperand(i));
        if (X != 0)
          return X;
      }

      return 0;
    }

    case scAddExpr:
    case scMulExpr:
    case scSMaxExpr:
    case scUMaxExpr: {
      const SCEVNAryExpr *LC = cast<SCEVNAryExpr>(LHS);
      const SCEVNAryExpr *RC = cast<SCEVNAryExpr>(RHS);

      // Lexicographically compare n-ary expressions.
      unsigned LNumOps = LC->getNumOperands(), RNumOps = RC->getNumOperands();
      if (LNumOps != RNumOps)
        return (int)LNumOps - (int)RNumOps;

      for (unsigned i = 0; i != LNumOps; ++i) {
        if (i >= RNumOps)
          return 1;
        long X = compare(LC->getOperand(i), RC->getOperand(i));
        if (X != 0)
          return X;
      }
      return (int)LNumOps - (int)RNumOps;
    }

    case scUDivExpr: {
      const SCEVUDivExpr *LC = cast<SCEVUDivExpr>(LHS);
      const SCEVUDivExpr *RC = cast<SCEVUDivExpr>(RHS);

      // Lexicographically compare udiv expressions.
      long X = compare(LC->getLHS(), RC->getLHS());
      if (X != 0)
        return X;
      return compare(LC->getRHS(), RC->getRHS());
    }

    case scTruncate:
    case scZeroExtend:
    case scSignExtend: {
      const SCEVCastExpr *LC = cast<SCEVCastExpr>(LHS);
      const SCEVCastExpr *RC = cast<SCEVCastExpr>(RHS);

      // Compare cast expressions by operand.
      return compare(LC->getOperand(), RC->getOperand());
    }

    case scCouldNotCompute:
      llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
    }
    llvm_unreachable("Unknown SCEV kind!");
  }
};
}

/// GroupByComplexity - Given a list of SCEV objects, order them by their
/// complexity, and group objects of the same complexity together by value.
/// When this routine is finished, we know that any duplicates in the vector are
/// consecutive and that complexity is monotonically increasing.
///
/// Note that we take special precautions to ensure that we get deterministic
/// results from this routine. In other words, we don't want the results of
/// this to depend on where the addresses of various SCEV objects happened to
/// land in memory.
///
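/// For example, given the operands {%a, 3, %a, 2} (where %a is some
/// SCEVUnknown; an illustrative case), constants have the lowest complexity,
/// so the result is {2, 3, %a, %a}, with the duplicate %a values adjacent.
///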
static void GroupByComplexity(SmallVectorImpl<const SCEV *> &Ops,
                              LoopInfo *LI) {
  if (Ops.size() < 2) return;  // Noop
  if (Ops.size() == 2) {
    // This is the common case, which also happens to be trivially simple.
    // Special case it.
    const SCEV *&LHS = Ops[0], *&RHS = Ops[1];
    if (SCEVComplexityCompare(LI)(RHS, LHS))
      std::swap(LHS, RHS);
    return;
  }

  // Do the rough sort by complexity.
  std::stable_sort(Ops.begin(), Ops.end(), SCEVComplexityCompare(LI));

  // Now that we are sorted by complexity, group elements of the same
  // complexity. Note that this is, at worst, N^2, but the vector is likely to
  // be extremely short in practice. Note that we take this approach because we
  // do not want to depend on the addresses of the objects we are grouping.
  for (unsigned i = 0, e = Ops.size(); i != e-2; ++i) {
    const SCEV *S = Ops[i];
    unsigned Complexity = S->getSCEVType();

    // If there are any objects of the same complexity and same value as this
    // one, group them.
    for (unsigned j = i+1; j != e && Ops[j]->getSCEVType() == Complexity; ++j) {
      if (Ops[j] == S) { // Found a duplicate.
        // Move it to immediately after i'th element.
        std::swap(Ops[i+1], Ops[j]);
        ++i;   // no need to rescan it.
        if (i == e-2) return;  // Done!
      }
    }
  }
}

namespace {
struct FindSCEVSize {
  int Size;
  FindSCEVSize() : Size(0) {}

  bool follow(const SCEV *S) {
    ++Size;
    // Keep looking at all operands of S.
    return true;
  }
  bool isDone() const {
    return false;
  }
};
}

// Returns the size of the SCEV S.
static inline int sizeOfSCEV(const SCEV *S) {
  FindSCEVSize F;
  SCEVTraversal<FindSCEVSize> ST(F);
  ST.visitAll(S);
  return F.Size;
}

namespace {

struct SCEVDivision : public SCEVVisitor<SCEVDivision, void> {
public:
  // Computes the Quotient and Remainder of the division of Numerator by
  // Denominator.
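  //
  // For example (an illustrative case, assuming a loop %n with the affine
  // recurrence {0,+,4}<%n> as the numerator): dividing {0,+,4}<%n> by the
  // constant 2 yields the quotient {0,+,2}<%n> and a remainder of 0.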
  static void divide(ScalarEvolution &SE, const SCEV *Numerator,
                     const SCEV *Denominator, const SCEV **Quotient,
                     const SCEV **Remainder) {
    assert(Numerator && Denominator && "Uninitialized SCEV");

    SCEVDivision D(SE, Numerator, Denominator);

    // Check for the trivial case here to avoid having to check for it in the
    // rest of the code.
    if (Numerator == Denominator) {
      *Quotient = D.One;
      *Remainder = D.Zero;
      return;
    }

    if (Numerator->isZero()) {
      *Quotient = D.Zero;
      *Remainder = D.Zero;
      return;
    }

    // A simple case: dividing N by 1 gives quotient N and remainder 0.
    if (Denominator->isOne()) {
      *Quotient = Numerator;
      *Remainder = D.Zero;
      return;
    }

    // Split the Denominator when it is a product.
    if (const SCEVMulExpr *T = dyn_cast<const SCEVMulExpr>(Denominator)) {
      const SCEV *Q, *R;
      *Quotient = Numerator;
      for (const SCEV *Op : T->operands()) {
        divide(SE, *Quotient, Op, &Q, &R);
        *Quotient = Q;

        // Bail out when the Numerator is not divisible by one of the terms of
        // the Denominator.
        if (!R->isZero()) {
          *Quotient = D.Zero;
          *Remainder = Numerator;
          return;
        }
      }
      *Remainder = D.Zero;
      return;
    }

    D.visit(Numerator);
    *Quotient = D.Quotient;
    *Remainder = D.Remainder;
  }

  // Except in the trivial case described above, we do not know how to divide
  // Expr by Denominator for the following functions with empty implementation.
  void visitTruncateExpr(const SCEVTruncateExpr *Numerator) {}
  void visitZeroExtendExpr(const SCEVZeroExtendExpr *Numerator) {}
  void visitSignExtendExpr(const SCEVSignExtendExpr *Numerator) {}
  void visitUDivExpr(const SCEVUDivExpr *Numerator) {}
  void visitSMaxExpr(const SCEVSMaxExpr *Numerator) {}
  void visitUMaxExpr(const SCEVUMaxExpr *Numerator) {}
  void visitUnknown(const SCEVUnknown *Numerator) {}
  void visitCouldNotCompute(const SCEVCouldNotCompute *Numerator) {}

  void visitConstant(const SCEVConstant *Numerator) {
    if (const SCEVConstant *D = dyn_cast<SCEVConstant>(Denominator)) {
      APInt NumeratorVal = Numerator->getValue()->getValue();
      APInt DenominatorVal = D->getValue()->getValue();
      uint32_t NumeratorBW = NumeratorVal.getBitWidth();
      uint32_t DenominatorBW = DenominatorVal.getBitWidth();

      if (NumeratorBW > DenominatorBW)
        DenominatorVal = DenominatorVal.sext(NumeratorBW);
      else if (NumeratorBW < DenominatorBW)
        NumeratorVal = NumeratorVal.sext(DenominatorBW);

      APInt QuotientVal(NumeratorVal.getBitWidth(), 0);
      APInt RemainderVal(NumeratorVal.getBitWidth(), 0);
      APInt::sdivrem(NumeratorVal, DenominatorVal, QuotientVal, RemainderVal);
      Quotient = SE.getConstant(QuotientVal);
      Remainder = SE.getConstant(RemainderVal);
      return;
    }
  }

  void visitAddRecExpr(const SCEVAddRecExpr *Numerator) {
    const SCEV *StartQ, *StartR, *StepQ, *StepR;
    if (!Numerator->isAffine())
      return cannotDivide(Numerator);
    divide(SE, Numerator->getStart(), Denominator, &StartQ, &StartR);
    divide(SE, Numerator->getStepRecurrence(SE), Denominator, &StepQ, &StepR);
    // Bail out if the types do not match.
    Type *Ty = Denominator->getType();
    if (Ty != StartQ->getType() || Ty != StartR->getType() ||
        Ty != StepQ->getType() || Ty != StepR->getType())
      return cannotDivide(Numerator);
    Quotient = SE.getAddRecExpr(StartQ, StepQ, Numerator->getLoop(),
                                Numerator->getNoWrapFlags());
    Remainder = SE.getAddRecExpr(StartR, StepR, Numerator->getLoop(),
                                 Numerator->getNoWrapFlags());
  }

  void visitAddExpr(const SCEVAddExpr *Numerator) {
    SmallVector<const SCEV *, 2> Qs, Rs;
    Type *Ty = Denominator->getType();

    for (const SCEV *Op : Numerator->operands()) {
      const SCEV *Q, *R;
      divide(SE, Op, Denominator, &Q, &R);

      // Bail out if types do not match.
      if (Ty != Q->getType() || Ty != R->getType())
        return cannotDivide(Numerator);

      Qs.push_back(Q);
      Rs.push_back(R);
    }

    if (Qs.size() == 1) {
      Quotient = Qs[0];
      Remainder = Rs[0];
      return;
    }

    Quotient = SE.getAddExpr(Qs);
    Remainder = SE.getAddExpr(Rs);
  }

  void visitMulExpr(const SCEVMulExpr *Numerator) {
    SmallVector<const SCEV *, 2> Qs;
    Type *Ty = Denominator->getType();

    bool FoundDenominatorTerm = false;
    for (const SCEV *Op : Numerator->operands()) {
      // Bail out if types do not match.
      if (Ty != Op->getType())
        return cannotDivide(Numerator);

      if (FoundDenominatorTerm) {
        Qs.push_back(Op);
        continue;
      }

      // Check whether Denominator divides one of the product operands.
      const SCEV *Q, *R;
      divide(SE, Op, Denominator, &Q, &R);
      if (!R->isZero()) {
        Qs.push_back(Op);
        continue;
      }

      // Bail out if types do not match.
      if (Ty != Q->getType())
        return cannotDivide(Numerator);

      FoundDenominatorTerm = true;
      Qs.push_back(Q);
    }

    if (FoundDenominatorTerm) {
      Remainder = Zero;
      if (Qs.size() == 1)
        Quotient = Qs[0];
      else
        Quotient = SE.getMulExpr(Qs);
      return;
    }

    if (!isa<SCEVUnknown>(Denominator))
      return cannotDivide(Numerator);

    // The Remainder is obtained by replacing Denominator by 0 in Numerator.
    ValueToValueMap RewriteMap;
    RewriteMap[cast<SCEVUnknown>(Denominator)->getValue()] =
        cast<SCEVConstant>(Zero)->getValue();
    Remainder = SCEVParameterRewriter::rewrite(Numerator, SE, RewriteMap, true);

    if (Remainder->isZero()) {
      // The Quotient is obtained by replacing Denominator by 1 in Numerator.
      RewriteMap[cast<SCEVUnknown>(Denominator)->getValue()] =
          cast<SCEVConstant>(One)->getValue();
      Quotient =
          SCEVParameterRewriter::rewrite(Numerator, SE, RewriteMap, true);
      return;
    }

    // Quotient is (Numerator - Remainder) divided by Denominator.
    const SCEV *Q, *R;
    const SCEV *Diff = SE.getMinusSCEV(Numerator, Remainder);
    // This SCEV does not seem to simplify: fail the division here.
    if (sizeOfSCEV(Diff) > sizeOfSCEV(Numerator))
      return cannotDivide(Numerator);
    divide(SE, Diff, Denominator, &Q, &R);
    if (R != Zero)
      return cannotDivide(Numerator);
    Quotient = Q;
  }

private:
  SCEVDivision(ScalarEvolution &S, const SCEV *Numerator,
               const SCEV *Denominator)
      : SE(S), Denominator(Denominator) {
    Zero = SE.getConstant(Denominator->getType(), 0);
    One = SE.getConstant(Denominator->getType(), 1);

    // We generally do not know how to divide Expr by Denominator. We
    // initialize the division to a "cannot divide" state to simplify the rest
    // of the code.
    cannotDivide(Numerator);
  }

  // Convenience function for giving up on the division. We set the quotient to
  // be equal to zero and the remainder to be equal to the numerator.
  void cannotDivide(const SCEV *Numerator) {
    Quotient = Zero;
    Remainder = Numerator;
  }

  ScalarEvolution &SE;
  const SCEV *Denominator, *Quotient, *Remainder, *Zero, *One;
};

}

//===----------------------------------------------------------------------===//
//                      Simple SCEV method implementations
//===----------------------------------------------------------------------===//

Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 926 | /// BinomialCoefficient - Compute BC(It, K). The result has width W, the
Dan Gohman | 4d5435d | 2009-05-24 23:45:28 +0000 | [diff] [blame] | 927 | /// bitwidth of ResultTy. Assumes K > 0.
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 928 | static const SCEV *BinomialCoefficient(const SCEV *It, unsigned K, |
Dan Gohman | 32291b1 | 2009-07-21 00:38:55 +0000 | [diff] [blame] | 929 | ScalarEvolution &SE, |
Nick Lewycky | 702cf1e | 2011-09-06 06:39:54 +0000 | [diff] [blame] | 930 | Type *ResultTy) { |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 931 | // Handle the simplest case efficiently. |
| 932 | if (K == 1) |
| 933 | return SE.getTruncateOrZeroExtend(It, ResultTy); |
| 934 | |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 935 | // We are using the following formula for BC(It, K): |
| 936 | // |
| 937 | // BC(It, K) = (It * (It - 1) * ... * (It - K + 1)) / K! |
| 938 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 939 | // Suppose W is the bitwidth of the return value. We must be prepared for
| 940 | // overflow. Hence, we must ensure that the result of our computation is
| 941 | // equal to the accurate one modulo 2^W. Unfortunately, division isn't |
| 942 | // safe in modular arithmetic. |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 943 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 944 | // However, this code doesn't use exactly that formula; the formula it uses |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 945 | // is something like the following, where T is the number of factors of 2 in |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 946 | // K! (i.e. trailing zeros in the binary representation of K!), and ^ is |
| 947 | // exponentiation: |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 948 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 949 | // BC(It, K) = (It * (It - 1) * ... * (It - K + 1)) / 2^T / (K! / 2^T) |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 950 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 951 | // This formula is trivially equivalent to the previous formula. However, |
| 952 | // this formula can be implemented much more efficiently. The trick is that |
| 953 | // K! / 2^T is odd, and exact division by an odd number *is* safe in modular |
| 954 | // arithmetic. To do exact division in modular arithmetic, all we have |
| 955 | // to do is multiply by the inverse. Therefore, this step can be done at |
| 956 | // width W. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 957 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 958 | // The next issue is how to safely do the division by 2^T. The way this |
| 959 | // is done is by doing the multiplication step at a width of at least W + T |
| 960 | // bits. This way, the bottom W+T bits of the product are accurate. Then, |
| 961 | // when we perform the division by 2^T (which is equivalent to a right shift |
| 962 | // by T), the bottom W bits are accurate. Extra bits are okay; they'll get |
| 963 | // truncated out after the division by 2^T. |
| 964 | // |
| 965 | // In comparison to just directly using the first formula, this technique |
| 966 | // is much more efficient; using the first formula requires W * K bits, |
| 967 | // but this formula needs less than W + K bits. Also, the first formula requires
| 968 | // a division step, whereas this formula only requires multiplies and shifts. |
| 969 | // |
| 970 | // It doesn't matter whether the subtraction step is done in the calculation |
| 971 | // width or the input iteration count's width; if the subtraction overflows, |
| 972 | // the result must be zero anyway. We prefer here to do it in the width of |
| 973 | // the induction variable because it helps a lot for certain cases; CodeGen |
| 974 | // isn't smart enough to ignore the overflow, which leads to much less |
| 975 | // efficient code if the width of the subtraction is wider than the native |
| 976 | // register width. |
| 977 | // |
| 978 | // (It's possible to not widen at all by pulling out factors of 2 before |
| 979 | // the multiplication; for example, K=2 can be calculated as |
| 980 | // It/2*(It+(It*INT_MIN/INT_MIN)+-1). However, it requires |
| 981 | // extra arithmetic, so it's not an obvious win, and it gets |
| 982 | // much more complicated for K > 3.) |
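// As a concrete instance of the scheme above (editorial note): for K = 3 and
// W = 32, K! = 6 = 2^1 * 3, so T = 1 and the odd part of K! is 3. The product
// It*(It-1)*(It-2) is formed at width W + T = 33 bits, the division by
// 2^T = 2 is a lossless right shift at that width, and the remaining exact
// division by 3 is done by multiplying by the multiplicative inverse of 3
// modulo 2^32, which is 0xAAAAAAAB.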
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 983 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 984 | // Protection from insane SCEVs; this bound is conservative, |
| 985 | // but it probably doesn't matter. |
| 986 | if (K > 1000) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 987 | return SE.getCouldNotCompute(); |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 988 | |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 989 | unsigned W = SE.getTypeSizeInBits(ResultTy); |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 990 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 991 | // Calculate K! / 2^T and T; we divide out the factors of two before |
| 992 | // multiplying for calculating K! / 2^T to avoid overflow. |
| 993 | // Other overflow doesn't matter because we only care about the bottom |
| 994 | // W bits of the result. |
| 995 | APInt OddFactorial(W, 1); |
| 996 | unsigned T = 1; |
| 997 | for (unsigned i = 3; i <= K; ++i) { |
| 998 | APInt Mult(W, i); |
| 999 | unsigned TwoFactors = Mult.countTrailingZeros(); |
| 1000 | T += TwoFactors; |
| 1001 | Mult = Mult.lshr(TwoFactors); |
| 1002 | OddFactorial *= Mult; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1003 | } |
Nick Lewycky | ed169d5 | 2008-06-13 04:38:55 +0000 | [diff] [blame] | 1004 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1005 | // We need at least W + T bits for the multiplication step |
Nick Lewycky | 21add8f | 2009-01-25 08:16:27 +0000 | [diff] [blame] | 1006 | unsigned CalculationBits = W + T; |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1007 | |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 1008 | // Calculate 2^T, at width T+W. |
Benjamin Kramer | fc3ea6f | 2013-07-11 16:05:50 +0000 | [diff] [blame] | 1009 | APInt DivFactor = APInt::getOneBitSet(CalculationBits, T); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1010 | |
| 1011 | // Calculate the multiplicative inverse of K! / 2^T; |
| 1012 | // this multiplication factor will perform the exact division by |
| 1013 | // K! / 2^T. |
| 1014 | APInt Mod = APInt::getSignedMinValue(W+1); |
| 1015 | APInt MultiplyFactor = OddFactorial.zext(W+1); |
| 1016 | MultiplyFactor = MultiplyFactor.multiplicativeInverse(Mod); |
| 1017 | MultiplyFactor = MultiplyFactor.trunc(W); |
| 1018 | |
| 1019 | // Calculate the product, at width T+W |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1020 | IntegerType *CalculationTy = IntegerType::get(SE.getContext(), |
Owen Anderson | 55f1c09 | 2009-08-13 21:58:54 +0000 | [diff] [blame] | 1021 | CalculationBits); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1022 | const SCEV *Dividend = SE.getTruncateOrZeroExtend(It, CalculationTy); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1023 | for (unsigned i = 1; i != K; ++i) { |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 1024 | const SCEV *S = SE.getMinusSCEV(It, SE.getConstant(It->getType(), i)); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1025 | Dividend = SE.getMulExpr(Dividend, |
| 1026 | SE.getTruncateOrZeroExtend(S, CalculationTy)); |
| 1027 | } |
| 1028 | |
| 1029 | // Divide by 2^T |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1030 | const SCEV *DivResult = SE.getUDivExpr(Dividend, SE.getConstant(DivFactor)); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1031 | |
| 1032 | // Truncate the result, and divide by K! / 2^T. |
| 1033 | |
| 1034 | return SE.getMulExpr(SE.getConstant(MultiplyFactor), |
| 1035 | SE.getTruncateOrZeroExtend(DivResult, ResultTy)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1036 | } |
| 1037 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1038 | /// evaluateAtIteration - Return the value of this chain of recurrences at |
| 1039 | /// the specified iteration number. We can evaluate this recurrence by |
| 1040 | /// multiplying each element in the chain by the binomial coefficient |
| 1041 | /// corresponding to it. In other words, we can evaluate {A,+,B,+,C,+,D} as: |
| 1042 | /// |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1043 | /// A*BC(It, 0) + B*BC(It, 1) + C*BC(It, 2) + D*BC(It, 3) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1044 | /// |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1045 | /// where BC(It, k) stands for the binomial coefficient.
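/// For example (an editorial illustration), {5,+,3,+,2} evaluated at
/// iteration It is 5*BC(It,0) + 3*BC(It,1) + 2*BC(It,2)
/// = 5 + 3*It + It*(It-1) = It^2 + 2*It + 5, giving 5, 8, 13, 20, ... for
/// It = 0, 1, 2, 3.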
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1046 | /// |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1047 | const SCEV *SCEVAddRecExpr::evaluateAtIteration(const SCEV *It, |
Dan Gohman | 32291b1 | 2009-07-21 00:38:55 +0000 | [diff] [blame] | 1048 | ScalarEvolution &SE) const { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1049 | const SCEV *Result = getStart(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1050 | for (unsigned i = 1, e = getNumOperands(); i != e; ++i) { |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1051 | // The computation is correct in the face of overflow provided that the |
| 1052 | // multiplication is performed _after_ the evaluation of the binomial |
| 1053 | // coefficient. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1054 | const SCEV *Coeff = BinomialCoefficient(It, i, SE, getType()); |
Nick Lewycky | 707663e | 2008-10-13 03:58:02 +0000 | [diff] [blame] | 1055 | if (isa<SCEVCouldNotCompute>(Coeff)) |
| 1056 | return Coeff; |
| 1057 | |
| 1058 | Result = SE.getAddExpr(Result, SE.getMulExpr(getOperand(i), Coeff)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1059 | } |
| 1060 | return Result; |
| 1061 | } |
| 1062 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1063 | //===----------------------------------------------------------------------===// |
| 1064 | // SCEV Expression folder implementations |
| 1065 | //===----------------------------------------------------------------------===// |
| 1066 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1067 | const SCEV *ScalarEvolution::getTruncateExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1068 | Type *Ty) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1069 | assert(getTypeSizeInBits(Op->getType()) > getTypeSizeInBits(Ty) && |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1070 | "This is not a truncating conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1071 | assert(isSCEVable(Ty) && |
| 1072 | "This is not a conversion to a SCEVable type!"); |
| 1073 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1074 | |
Dan Gohman | 3a302cb | 2009-07-13 20:50:19 +0000 | [diff] [blame] | 1075 | FoldingSetNodeID ID; |
| 1076 | ID.AddInteger(scTruncate); |
| 1077 | ID.AddPointer(Op); |
| 1078 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1079 | void *IP = nullptr; |
Dan Gohman | 3a302cb | 2009-07-13 20:50:19 +0000 | [diff] [blame] | 1080 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
| 1081 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1082 | // Fold if the operand is constant. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1083 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
Dan Gohman | 8d7576e | 2009-06-24 00:38:39 +0000 | [diff] [blame] | 1084 | return getConstant( |
Nuno Lopes | ab5c924 | 2012-05-15 15:44:38 +0000 | [diff] [blame] | 1085 | cast<ConstantInt>(ConstantExpr::getTrunc(SC->getValue(), Ty))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1086 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1087 | // trunc(trunc(x)) --> trunc(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1088 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1089 | return getTruncateExpr(ST->getOperand(), Ty); |
| 1090 | |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1091 | // trunc(sext(x)) --> sext(x) if widening or trunc(x) if narrowing |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1092 | if (const SCEVSignExtendExpr *SS = dyn_cast<SCEVSignExtendExpr>(Op)) |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1093 | return getTruncateOrSignExtend(SS->getOperand(), Ty); |
| 1094 | |
| 1095 | // trunc(zext(x)) --> zext(x) if widening or trunc(x) if narrowing |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1096 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1097 | return getTruncateOrZeroExtend(SZ->getOperand(), Ty); |
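// An editorial illustration of the fold above: if %x is an i8 and Op is
// (zext i8 %x to i64), then truncating to i16 yields (zext i8 %x to i16),
// truncating to i8 yields %x itself, and truncating to i1 yields
// (trunc i8 %x to i1); getTruncateOrZeroExtend selects the right form.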
| 1098 | |
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1099 | // trunc(x1+x2+...+xN) --> trunc(x1)+trunc(x2)+...+trunc(xN) if we can |
Nick Lewycky | 2ce2832 | 2015-03-20 02:52:23 +0000 | [diff] [blame] | 1100 | // eliminate all the truncates, or if the truncates only replace other casts.
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1101 | if (const SCEVAddExpr *SA = dyn_cast<SCEVAddExpr>(Op)) { |
| 1102 | SmallVector<const SCEV *, 4> Operands; |
| 1103 | bool hasTrunc = false; |
| 1104 | for (unsigned i = 0, e = SA->getNumOperands(); i != e && !hasTrunc; ++i) { |
| 1105 | const SCEV *S = getTruncateExpr(SA->getOperand(i), Ty); |
Nick Lewycky | be8af48 | 2015-03-20 02:25:00 +0000 | [diff] [blame] | 1106 | if (!isa<SCEVCastExpr>(SA->getOperand(i))) |
| 1107 | hasTrunc = isa<SCEVTruncateExpr>(S); |
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1108 | Operands.push_back(S); |
| 1109 | } |
| 1110 | if (!hasTrunc) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1111 | return getAddExpr(Operands); |
Nick Lewycky | d9e6b4a | 2011-01-26 08:40:22 +0000 | [diff] [blame] | 1112 | UniqueSCEVs.FindNodeOrInsertPos(ID, IP); // Mutates IP, returns NULL. |
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1113 | } |
| 1114 | |
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1115 | // trunc(x1*x2*...*xN) --> trunc(x1)*trunc(x2)*...*trunc(xN) if we can |
Nick Lewycky | be8af48 | 2015-03-20 02:25:00 +0000 | [diff] [blame] | 1116 | // eliminate all the truncates, or if the truncates only replace other casts.
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1117 | if (const SCEVMulExpr *SM = dyn_cast<SCEVMulExpr>(Op)) { |
| 1118 | SmallVector<const SCEV *, 4> Operands; |
| 1119 | bool hasTrunc = false; |
| 1120 | for (unsigned i = 0, e = SM->getNumOperands(); i != e && !hasTrunc; ++i) { |
| 1121 | const SCEV *S = getTruncateExpr(SM->getOperand(i), Ty); |
Nick Lewycky | be8af48 | 2015-03-20 02:25:00 +0000 | [diff] [blame] | 1122 | if (!isa<SCEVCastExpr>(SM->getOperand(i))) |
| 1123 | hasTrunc = isa<SCEVTruncateExpr>(S); |
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1124 | Operands.push_back(S); |
| 1125 | } |
| 1126 | if (!hasTrunc) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1127 | return getMulExpr(Operands); |
Nick Lewycky | d9e6b4a | 2011-01-26 08:40:22 +0000 | [diff] [blame] | 1128 | UniqueSCEVs.FindNodeOrInsertPos(ID, IP); // Mutates IP, returns NULL. |
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1129 | } |
| 1130 | |
Dan Gohman | 5a728c9 | 2009-06-18 16:24:47 +0000 | [diff] [blame] | 1131 | // If the input value is a chrec scev, truncate the chrec's operands. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1132 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(Op)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1133 | SmallVector<const SCEV *, 4> Operands; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1134 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 1135 | Operands.push_back(getTruncateExpr(AddRec->getOperand(i), Ty)); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1136 | return getAddRecExpr(Operands, AddRec->getLoop(), SCEV::FlagAnyWrap); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1137 | } |
| 1138 | |
Dan Gohman | 89dd42a | 2010-06-25 18:47:08 +0000 | [diff] [blame] | 1139 | // The cast wasn't folded; create an explicit cast node. We can reuse |
| 1140 | // the existing insert position since if we get here, we won't have |
| 1141 | // made any changes which would invalidate it. |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 1142 | SCEV *S = new (SCEVAllocator) SCEVTruncateExpr(ID.Intern(SCEVAllocator), |
| 1143 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1144 | UniqueSCEVs.InsertNode(S, IP); |
| 1145 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1146 | } |
| 1147 | |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1148 | // Get the limit of a recurrence such that incrementing by Step cannot cause |
| 1149 | // signed overflow as long as the value of the recurrence within the |
| 1150 | // loop does not exceed this limit before incrementing. |
| 1151 | static const SCEV *getSignedOverflowLimitForStep(const SCEV *Step, |
| 1152 | ICmpInst::Predicate *Pred, |
| 1153 | ScalarEvolution *SE) { |
| 1154 | unsigned BitWidth = SE->getTypeSizeInBits(Step->getType()); |
| 1155 | if (SE->isKnownPositive(Step)) { |
| 1156 | *Pred = ICmpInst::ICMP_SLT; |
| 1157 | return SE->getConstant(APInt::getSignedMinValue(BitWidth) - |
| 1158 | SE->getSignedRange(Step).getSignedMax()); |
| 1159 | } |
| 1160 | if (SE->isKnownNegative(Step)) { |
| 1161 | *Pred = ICmpInst::ICMP_SGT; |
| 1162 | return SE->getConstant(APInt::getSignedMaxValue(BitWidth) - |
| 1163 | SE->getSignedRange(Step).getSignedMin()); |
| 1164 | } |
| 1165 | return nullptr; |
| 1166 | } |
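// Editorial example: for a 32-bit recurrence whose Step is known positive
// with signed maximum 10, the limit returned is INT32_MIN - 10, which wraps
// to INT32_MAX - 9; any recurrence value v with v <s limit satisfies
// v + Step <= INT32_MAX, so the increment cannot overflow in the signed
// sense.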
| 1167 | |
| 1168 | // Get the limit of a recurrence such that incrementing by Step cannot cause |
| 1169 | // unsigned overflow as long as the value of the recurrence within the loop
| 1170 | // does not exceed this limit before incrementing.
| 1171 | static const SCEV *getUnsignedOverflowLimitForStep(const SCEV *Step, |
| 1172 | ICmpInst::Predicate *Pred, |
| 1173 | ScalarEvolution *SE) { |
| 1174 | unsigned BitWidth = SE->getTypeSizeInBits(Step->getType()); |
| 1175 | *Pred = ICmpInst::ICMP_ULT; |
| 1176 | |
| 1177 | return SE->getConstant(APInt::getMinValue(BitWidth) - |
| 1178 | SE->getUnsignedRange(Step).getUnsignedMax()); |
| 1179 | } |
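// Editorial example: for an 8-bit recurrence whose Step has unsigned maximum
// 3, the limit is 0 - 3, i.e. 253; any value v with v <u 253 satisfies
// v + Step <= 255, so incrementing cannot wrap in the unsigned sense.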
| 1180 | |
| 1181 | namespace { |
| 1182 | |
| 1183 | struct ExtendOpTraitsBase { |
| 1184 | typedef const SCEV *(ScalarEvolution::*GetExtendExprTy)(const SCEV *, Type *); |
| 1185 | }; |
| 1186 | |
| 1187 | // Used to make code generic over signed and unsigned overflow. |
| 1188 | template <typename ExtendOp> struct ExtendOpTraits { |
| 1189 | // Members present: |
| 1190 | // |
| 1191 | // static const SCEV::NoWrapFlags WrapType; |
| 1192 | // |
| 1193 | // static const ExtendOpTraitsBase::GetExtendExprTy GetExtendExpr; |
| 1194 | // |
| 1195 | // static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1196 | // ICmpInst::Predicate *Pred, |
| 1197 | // ScalarEvolution *SE); |
| 1198 | }; |
| 1199 | |
| 1200 | template <> |
| 1201 | struct ExtendOpTraits<SCEVSignExtendExpr> : public ExtendOpTraitsBase { |
| 1202 | static const SCEV::NoWrapFlags WrapType = SCEV::FlagNSW; |
| 1203 | |
| 1204 | static const GetExtendExprTy GetExtendExpr; |
| 1205 | |
| 1206 | static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1207 | ICmpInst::Predicate *Pred, |
| 1208 | ScalarEvolution *SE) { |
| 1209 | return getSignedOverflowLimitForStep(Step, Pred, SE); |
| 1210 | } |
| 1211 | }; |
| 1212 | |
Sanjoy Das | c1065b9 | 2015-02-18 08:03:22 +0000 | [diff] [blame] | 1213 | const ExtendOpTraitsBase::GetExtendExprTy ExtendOpTraits< |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1214 | SCEVSignExtendExpr>::GetExtendExpr = &ScalarEvolution::getSignExtendExpr; |
| 1215 | |
| 1216 | template <> |
| 1217 | struct ExtendOpTraits<SCEVZeroExtendExpr> : public ExtendOpTraitsBase { |
| 1218 | static const SCEV::NoWrapFlags WrapType = SCEV::FlagNUW; |
| 1219 | |
| 1220 | static const GetExtendExprTy GetExtendExpr; |
| 1221 | |
| 1222 | static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1223 | ICmpInst::Predicate *Pred, |
| 1224 | ScalarEvolution *SE) { |
| 1225 | return getUnsignedOverflowLimitForStep(Step, Pred, SE); |
| 1226 | } |
| 1227 | }; |
| 1228 | |
Sanjoy Das | c1065b9 | 2015-02-18 08:03:22 +0000 | [diff] [blame] | 1229 | const ExtendOpTraitsBase::GetExtendExprTy ExtendOpTraits< |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1230 | SCEVZeroExtendExpr>::GetExtendExpr = &ScalarEvolution::getZeroExtendExpr; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 1231 | } |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1232 | |
| 1233 | // The recurrence AR has been shown to have no signed/unsigned wrap or something |
| 1234 | // close to it. Typically, if we can prove NSW/NUW for AR, then we can just as |
| 1235 | // easily prove NSW/NUW for its preincrement or postincrement sibling. This |
| 1236 | // allows normalizing a sign/zero extended AddRec as such: {sext/zext(Step + |
| 1237 | // Start),+,Step} => {(Step + sext/zext(Start)),+,Step}. As a result, the
| 1238 | // expression "Step + sext/zext(PreIncAR)" is congruent with
| 1239 | // "sext/zext(PostIncAR)".
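// Editorial illustration: with Start = x + 4 and Step = 4, PreStart is x. If
// any of the three checks below succeeds for {x,+,4}, then sext/zext of
// {x+4,+,4} can be built with start sext/zext(x) + 4 in the wider type,
// which is what getExtendAddRecStart produces from the returned PreStart.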
| 1240 | template <typename ExtendOpTy> |
| 1241 | static const SCEV *getPreStartForExtend(const SCEVAddRecExpr *AR, Type *Ty, |
| 1242 | ScalarEvolution *SE) { |
| 1243 | auto WrapType = ExtendOpTraits<ExtendOpTy>::WrapType; |
| 1244 | auto GetExtendExpr = ExtendOpTraits<ExtendOpTy>::GetExtendExpr; |
| 1245 | |
| 1246 | const Loop *L = AR->getLoop(); |
| 1247 | const SCEV *Start = AR->getStart(); |
| 1248 | const SCEV *Step = AR->getStepRecurrence(*SE); |
| 1249 | |
| 1250 | // Check for a simple looking step prior to loop entry. |
| 1251 | const SCEVAddExpr *SA = dyn_cast<SCEVAddExpr>(Start); |
| 1252 | if (!SA) |
| 1253 | return nullptr; |
| 1254 | |
| 1255 | // Create an AddExpr for "PreStart" after subtracting Step. Full SCEV |
| 1256 | // subtraction is expensive. For this purpose, perform a quick and dirty |
| 1257 | // difference, by checking for Step in the operand list. |
| 1258 | SmallVector<const SCEV *, 4> DiffOps; |
| 1259 | for (const SCEV *Op : SA->operands()) |
| 1260 | if (Op != Step) |
| 1261 | DiffOps.push_back(Op); |
| 1262 | |
| 1263 | if (DiffOps.size() == SA->getNumOperands()) |
| 1264 | return nullptr; |
| 1265 | |
| 1266 | // Try to prove `WrapType` (SCEV::FlagNSW or SCEV::FlagNUW) on `PreStart` + |
| 1267 | // `Step`: |
| 1268 | |
| 1269 | // 1. NSW/NUW flags on the step increment. |
| 1270 | const SCEV *PreStart = SE->getAddExpr(DiffOps, SA->getNoWrapFlags()); |
| 1271 | const SCEVAddRecExpr *PreAR = dyn_cast<SCEVAddRecExpr>( |
| 1272 | SE->getAddRecExpr(PreStart, Step, L, SCEV::FlagAnyWrap)); |
| 1273 | |
Sanjoy Das | b14010d | 2015-02-24 01:02:42 +0000 | [diff] [blame] | 1274 | // "{S,+,X} is <nsw>/<nuw>" and "the backedge is taken at least once" implies |
| 1275 | // "S+X does not sign/unsign-overflow". |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1276 | // |
| 1277 | |
Sanjoy Das | b14010d | 2015-02-24 01:02:42 +0000 | [diff] [blame] | 1278 | const SCEV *BECount = SE->getBackedgeTakenCount(L); |
| 1279 | if (PreAR && PreAR->getNoWrapFlags(WrapType) && |
| 1280 | !isa<SCEVCouldNotCompute>(BECount) && SE->isKnownPositive(BECount)) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1281 | return PreStart; |
| 1282 | |
| 1283 | // 2. Direct overflow check on the step operation's expression. |
| 1284 | unsigned BitWidth = SE->getTypeSizeInBits(AR->getType()); |
| 1285 | Type *WideTy = IntegerType::get(SE->getContext(), BitWidth * 2); |
| 1286 | const SCEV *OperandExtendedStart = |
| 1287 | SE->getAddExpr((SE->*GetExtendExpr)(PreStart, WideTy), |
| 1288 | (SE->*GetExtendExpr)(Step, WideTy)); |
| 1289 | if ((SE->*GetExtendExpr)(Start, WideTy) == OperandExtendedStart) { |
| 1290 | if (PreAR && AR->getNoWrapFlags(WrapType)) { |
| 1291 | // If we know `AR` == {`PreStart`+`Step`,+,`Step`} is `WrapType` (FlagNSW |
| 1292 | // or FlagNUW) and that `PreStart` + `Step` is `WrapType` too, then |
| 1293 | // `PreAR` == {`PreStart`,+,`Step`} is also `WrapType`. Cache this fact. |
| 1294 | const_cast<SCEVAddRecExpr *>(PreAR)->setNoWrapFlags(WrapType); |
| 1295 | } |
| 1296 | return PreStart; |
| 1297 | } |
| 1298 | |
| 1299 | // 3. Loop precondition. |
| 1300 | ICmpInst::Predicate Pred; |
| 1301 | const SCEV *OverflowLimit = |
| 1302 | ExtendOpTraits<ExtendOpTy>::getOverflowLimitForStep(Step, &Pred, SE); |
| 1303 | |
| 1304 | if (OverflowLimit && |
| 1305 | SE->isLoopEntryGuardedByCond(L, Pred, PreStart, OverflowLimit)) { |
| 1306 | return PreStart; |
| 1307 | } |
| 1308 | return nullptr; |
| 1309 | } |
| 1310 | |
| 1311 | // Get the normalized zero or sign extended expression for this AddRec's Start. |
| 1312 | template <typename ExtendOpTy> |
| 1313 | static const SCEV *getExtendAddRecStart(const SCEVAddRecExpr *AR, Type *Ty, |
| 1314 | ScalarEvolution *SE) { |
| 1315 | auto GetExtendExpr = ExtendOpTraits<ExtendOpTy>::GetExtendExpr; |
| 1316 | |
| 1317 | const SCEV *PreStart = getPreStartForExtend<ExtendOpTy>(AR, Ty, SE); |
| 1318 | if (!PreStart) |
| 1319 | return (SE->*GetExtendExpr)(AR->getStart(), Ty); |
| 1320 | |
| 1321 | return SE->getAddExpr((SE->*GetExtendExpr)(AR->getStepRecurrence(*SE), Ty), |
| 1322 | (SE->*GetExtendExpr)(PreStart, Ty)); |
| 1323 | } |
| 1324 | |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1325 | // Try to prove away overflow by looking at "nearby" add recurrences. A |
| 1326 | // motivating example for this rule: if we know `{0,+,4}` is `ult` `-1` and it |
| 1327 | // does not itself wrap then we can conclude that `{1,+,4}` is `nuw`. |
| 1328 | // |
| 1329 | // Formally: |
| 1330 | // |
| 1331 | // {S,+,X} == {S-T,+,X} + T |
| 1332 | // => Ext({S,+,X}) == Ext({S-T,+,X} + T) |
| 1333 | // |
| 1334 | // If ({S-T,+,X} + T) does not overflow ... (1) |
| 1335 | // |
| 1336 | // RHS == Ext({S-T,+,X} + T) == Ext({S-T,+,X}) + Ext(T) |
| 1337 | // |
| 1338 | // If {S-T,+,X} does not overflow ... (2) |
| 1339 | // |
| 1340 | // RHS == Ext({S-T,+,X}) + Ext(T) == {Ext(S-T),+,Ext(X)} + Ext(T) |
| 1341 | // == {Ext(S-T)+Ext(T),+,Ext(X)} |
| 1342 | // |
| 1343 | // If (S-T)+T does not overflow ... (3) |
| 1344 | // |
| 1345 | // RHS == {Ext(S-T)+Ext(T),+,Ext(X)} == {Ext(S-T+T),+,Ext(X)} |
| 1346 | // == {Ext(S),+,Ext(X)} == LHS |
| 1347 | // |
| 1348 | // Thus, if (1), (2) and (3) are true for some T, then |
| 1349 | // Ext({S,+,X}) == {Ext(S),+,Ext(X)} |
| 1350 | // |
| 1351 | // (3) is implied by (1) -- "(S-T)+T does not overflow" is simply "({S-T,+,X}+T) |
| 1352 | // does not overflow" restricted to the 0th iteration. Therefore we only need |
| 1353 | // to check for (1) and (2). |
| 1354 | // |
| 1355 | // In the current context, S is `Start`, X is `Step`, Ext is `ExtendOpTy` and T |
| 1356 | // is `Delta` (defined below). |
| 1357 | // |
| 1358 | template <typename ExtendOpTy> |
| 1359 | bool ScalarEvolution::proveNoWrapByVaryingStart(const SCEV *Start, |
| 1360 | const SCEV *Step, |
| 1361 | const Loop *L) { |
| 1362 | auto WrapType = ExtendOpTraits<ExtendOpTy>::WrapType; |
| 1363 | |
| 1364 | // We restrict `Start` to a constant to prevent SCEV from spending too much |
| 1365 | // time here. It is correct (but more expensive) to continue with a |
| 1366 | // non-constant `Start` and do a general SCEV subtraction to compute |
| 1367 | // `PreStart` below. |
| 1368 | // |
| 1369 | const SCEVConstant *StartC = dyn_cast<SCEVConstant>(Start); |
| 1370 | if (!StartC) |
| 1371 | return false; |
| 1372 | |
| 1373 | APInt StartAI = StartC->getValue()->getValue(); |
| 1374 | |
| 1375 | for (unsigned Delta : {-2, -1, 1, 2}) { |
| 1376 | const SCEV *PreStart = getConstant(StartAI - Delta); |
| 1377 | |
| 1378 | // Give up if we don't already have the add recurrence we need because |
| 1379 | // actually constructing an add recurrence is relatively expensive. |
| 1380 | const SCEVAddRecExpr *PreAR = [&]() { |
| 1381 | FoldingSetNodeID ID; |
| 1382 | ID.AddInteger(scAddRecExpr); |
| 1383 | ID.AddPointer(PreStart); |
| 1384 | ID.AddPointer(Step); |
| 1385 | ID.AddPointer(L); |
| 1386 | void *IP = nullptr; |
| 1387 | return static_cast<SCEVAddRecExpr *>( |
NAKAMURA Takumi | 8f49dd3 | 2015-03-05 01:02:45 +0000 | [diff] [blame] | 1388 | this->UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1389 | }(); |
| 1390 | |
| 1391 | if (PreAR && PreAR->getNoWrapFlags(WrapType)) { // proves (2) |
| 1392 | const SCEV *DeltaS = getConstant(StartC->getType(), Delta); |
| 1393 | ICmpInst::Predicate Pred = ICmpInst::BAD_ICMP_PREDICATE; |
| 1394 | const SCEV *Limit = ExtendOpTraits<ExtendOpTy>::getOverflowLimitForStep( |
| 1395 | DeltaS, &Pred, this); |
| 1396 | if (Limit && isKnownPredicate(Pred, PreAR, Limit)) // proves (1) |
| 1397 | return true; |
| 1398 | } |
| 1399 | } |
| 1400 | |
| 1401 | return false; |
| 1402 | } |
| 1403 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1404 | const SCEV *ScalarEvolution::getZeroExtendExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1405 | Type *Ty) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1406 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
Dan Gohman | c1c2ba7 | 2009-04-16 19:25:55 +0000 | [diff] [blame] | 1407 | "This is not an extending conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1408 | assert(isSCEVable(Ty) && |
| 1409 | "This is not a conversion to a SCEVable type!"); |
| 1410 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | c1c2ba7 | 2009-04-16 19:25:55 +0000 | [diff] [blame] | 1411 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1412 | // Fold if the operand is constant. |
Dan Gohman | 5235cc2 | 2010-06-24 16:47:03 +0000 | [diff] [blame] | 1413 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
| 1414 | return getConstant( |
Nuno Lopes | ab5c924 | 2012-05-15 15:44:38 +0000 | [diff] [blame] | 1415 | cast<ConstantInt>(ConstantExpr::getZExt(SC->getValue(), Ty))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1416 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1417 | // zext(zext(x)) --> zext(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1418 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1419 | return getZeroExtendExpr(SZ->getOperand(), Ty); |
| 1420 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1421 | // Before doing any expensive analysis, check to see if we've already |
| 1422 | // computed a SCEV for this Op and Ty. |
| 1423 | FoldingSetNodeID ID; |
| 1424 | ID.AddInteger(scZeroExtend); |
| 1425 | ID.AddPointer(Op); |
| 1426 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1427 | void *IP = nullptr; |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1428 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
| 1429 | |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1430 | // zext(trunc(x)) --> zext(x) or x or trunc(x) |
| 1431 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) { |
| 1432 | // It's possible the bits taken off by the truncate were all zero bits. If |
| 1433 | // so, we should be able to simplify this further. |
| 1434 | const SCEV *X = ST->getOperand(); |
| 1435 | ConstantRange CR = getUnsignedRange(X); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1436 | unsigned TruncBits = getTypeSizeInBits(ST->getType()); |
| 1437 | unsigned NewBits = getTypeSizeInBits(Ty); |
| 1438 | if (CR.truncate(TruncBits).zeroExtend(NewBits).contains( |
Nick Lewycky | d4192f7 | 2011-01-23 20:06:05 +0000 | [diff] [blame] | 1439 | CR.zextOrTrunc(NewBits))) |
| 1440 | return getTruncateOrZeroExtend(X, Ty); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1441 | } |
| 1442 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1443 | // If the input value is a chrec scev, and we can prove that the value |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1444 | // did not overflow the old, smaller, value, we can zero extend all of the |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1445 | // operands (often constants). This allows analysis of something like |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1446 | // this: for (unsigned char X = 0; X < 100; ++X) { int Y = X; } |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1447 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1448 | if (AR->isAffine()) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1449 | const SCEV *Start = AR->getStart(); |
| 1450 | const SCEV *Step = AR->getStepRecurrence(*this); |
| 1451 | unsigned BitWidth = getTypeSizeInBits(AR->getType()); |
| 1452 | const Loop *L = AR->getLoop(); |
| 1453 | |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1454 | // If we have special knowledge that this addrec won't overflow, |
| 1455 | // we don't need to do any further analysis. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1456 | if (AR->getNoWrapFlags(SCEV::FlagNUW)) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1457 | return getAddRecExpr( |
| 1458 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1459 | getZeroExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1460 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1461 | // Check whether the backedge-taken count is SCEVCouldNotCompute. |
| 1462 | // Note that this serves two purposes: It filters out loops that are |
| 1463 | // simply not analyzable, and it covers the case where this code is |
| 1464 | // being called from within backedge-taken count analysis, such that |
| 1465 | // attempting to ask for the backedge-taken count would likely result |
| 1466 | // in infinite recursion. In the latter case, the analysis code will
| 1467 | // cope with a conservative value, and it will take care to purge |
| 1468 | // that value once it has finished. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1469 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(L); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1470 | if (!isa<SCEVCouldNotCompute>(MaxBECount)) { |
Dan Gohman | 95c5b0e | 2009-04-29 01:54:20 +0000 | [diff] [blame] | 1471 | // Manually compute the final value for AR, checking for |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1472 | // overflow. |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1473 | |
| 1474 | // Check whether the backedge-taken count can be losslessly casted to |
| 1475 | // the addrec's type. The count is always unsigned. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1476 | const SCEV *CastedMaxBECount = |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1477 | getTruncateOrZeroExtend(MaxBECount, Start->getType()); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1478 | const SCEV *RecastedMaxBECount = |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1479 | getTruncateOrZeroExtend(CastedMaxBECount, MaxBECount->getType()); |
| 1480 | if (MaxBECount == RecastedMaxBECount) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1481 | Type *WideTy = IntegerType::get(getContext(), BitWidth * 2); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1482 | // Check whether Start+Step*MaxBECount has no unsigned overflow. |
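// Editorial sketch of this check: for an i8 AddRec {0,+,1} with
// MaxBECount = 100, the narrow sum Start + MaxBECount*Step = 100
// zero-extends to 100, and the same sum computed on zero-extended operands
// is also 100, so the AddRec cannot wrap. With Start = 100 and
// MaxBECount = 200 the narrow sum wraps to 44 while the wide sum is 300, and
// no conclusion is drawn here.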
Dan Gohman | 007f504 | 2010-02-24 19:31:06 +0000 | [diff] [blame] | 1483 | const SCEV *ZMul = getMulExpr(CastedMaxBECount, Step); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1484 | const SCEV *ZAdd = getZeroExtendExpr(getAddExpr(Start, ZMul), WideTy); |
| 1485 | const SCEV *WideStart = getZeroExtendExpr(Start, WideTy); |
| 1486 | const SCEV *WideMaxBECount = |
| 1487 | getZeroExtendExpr(CastedMaxBECount, WideTy); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1488 | const SCEV *OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1489 | getAddExpr(WideStart, |
| 1490 | getMulExpr(WideMaxBECount, |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1491 | getZeroExtendExpr(Step, WideTy))); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1492 | if (ZAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1493 | // Cache knowledge of AR NUW, which is propagated to this AddRec. |
| 1494 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1495 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1496 | return getAddRecExpr( |
| 1497 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1498 | getZeroExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1499 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1500 | // Similar to above, only this time treat the step value as signed. |
| 1501 | // This covers loops that count down. |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1502 | OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1503 | getAddExpr(WideStart, |
| 1504 | getMulExpr(WideMaxBECount, |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1505 | getSignExtendExpr(Step, WideTy))); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1506 | if (ZAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1507 | // Cache knowledge of AR NW, which is propagated to this AddRec. |
| 1508 | // Negative step causes unsigned wrap, but it still can't self-wrap. |
| 1509 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1510 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1511 | return getAddRecExpr( |
| 1512 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1513 | getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1514 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1515 | } |
| 1516 | |
| 1517 | // If the backedge is guarded by a comparison with the pre-inc value |
| 1518 | // the addrec is safe. Also, if the entry is guarded by a comparison |
| 1519 | // with the start value and the backedge is guarded by a comparison |
| 1520 | // with the post-inc value, the addrec is safe. |
| 1521 | if (isKnownPositive(Step)) { |
| 1522 | const SCEV *N = getConstant(APInt::getMinValue(BitWidth) - |
| 1523 | getUnsignedRange(Step).getUnsignedMax()); |
| 1524 | if (isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_ULT, AR, N) || |
Dan Gohman | b50349a | 2010-04-11 19:27:13 +0000 | [diff] [blame] | 1525 | (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_ULT, Start, N) && |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1526 | isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_ULT, |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1527 | AR->getPostIncExpr(*this), N))) { |
| 1528 | // Cache knowledge of AR NUW, which is propagated to this AddRec. |
| 1529 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1530 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1531 | return getAddRecExpr( |
| 1532 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1533 | getZeroExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1534 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1535 | } else if (isKnownNegative(Step)) { |
| 1536 | const SCEV *N = getConstant(APInt::getMaxValue(BitWidth) - |
| 1537 | getSignedRange(Step).getSignedMin()); |
Dan Gohman | 5f18c54 | 2010-05-04 01:11:15 +0000 | [diff] [blame] | 1538 | if (isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_UGT, AR, N) || |
| 1539 | (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_UGT, Start, N) && |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1540 | isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_UGT, |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1541 | AR->getPostIncExpr(*this), N))) { |
| 1542 | // Cache knowledge of AR NW, which is propagated to this AddRec. |
| 1543 | // Negative step causes unsigned wrap, but it still can't self-wrap. |
| 1544 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
| 1545 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1546 | return getAddRecExpr( |
| 1547 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1548 | getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1549 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1550 | } |
| 1551 | } |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1552 | |
| 1553 | if (proveNoWrapByVaryingStart<SCEVZeroExtendExpr>(Start, Step, L)) { |
| 1554 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
| 1555 | return getAddRecExpr( |
| 1556 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this), |
| 1557 | getZeroExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
| 1558 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1559 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1560 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1561 | // The cast wasn't folded; create an explicit cast node. |
| 1562 | // Recompute the insert position, as it may have been invalidated. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1563 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 1564 | SCEV *S = new (SCEVAllocator) SCEVZeroExtendExpr(ID.Intern(SCEVAllocator), |
| 1565 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1566 | UniqueSCEVs.InsertNode(S, IP); |
| 1567 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1568 | } |
| 1569 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1570 | const SCEV *ScalarEvolution::getSignExtendExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1571 | Type *Ty) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1572 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1573 | "This is not an extending conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1574 | assert(isSCEVable(Ty) && |
| 1575 | "This is not a conversion to a SCEVable type!"); |
| 1576 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1577 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1578 | // Fold if the operand is constant. |
Dan Gohman | 5235cc2 | 2010-06-24 16:47:03 +0000 | [diff] [blame] | 1579 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
| 1580 | return getConstant( |
Nuno Lopes | ab5c924 | 2012-05-15 15:44:38 +0000 | [diff] [blame] | 1581 | cast<ConstantInt>(ConstantExpr::getSExt(SC->getValue(), Ty))); |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1582 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1583 | // sext(sext(x)) --> sext(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1584 | if (const SCEVSignExtendExpr *SS = dyn_cast<SCEVSignExtendExpr>(Op)) |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1585 | return getSignExtendExpr(SS->getOperand(), Ty); |
| 1586 | |
Nick Lewycky | e9ea75e | 2011-01-19 15:56:12 +0000 | [diff] [blame] | 1587 | // sext(zext(x)) --> zext(x) |
| 1588 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
| 1589 | return getZeroExtendExpr(SZ->getOperand(), Ty); |
| 1590 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1591 | // Before doing any expensive analysis, check to see if we've already |
| 1592 | // computed a SCEV for this Op and Ty. |
| 1593 | FoldingSetNodeID ID; |
| 1594 | ID.AddInteger(scSignExtend); |
| 1595 | ID.AddPointer(Op); |
| 1596 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1597 | void *IP = nullptr; |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1598 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
| 1599 | |
Nick Lewycky | b32c894 | 2011-01-22 22:06:21 +0000 | [diff] [blame] | 1600 | // If the input value is provably positive, build a zext instead. |
| 1601 | if (isKnownNonNegative(Op)) |
| 1602 | return getZeroExtendExpr(Op, Ty); |
| 1603 | |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1604 | // sext(trunc(x)) --> sext(x) or x or trunc(x) |
| 1605 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) { |
| 1606 | // It's possible the bits taken off by the truncate were all sign bits. If |
| 1607 | // so, we should be able to simplify this further. |
| 1608 | const SCEV *X = ST->getOperand(); |
| 1609 | ConstantRange CR = getSignedRange(X); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1610 | unsigned TruncBits = getTypeSizeInBits(ST->getType()); |
| 1611 | unsigned NewBits = getTypeSizeInBits(Ty); |
| 1612 | if (CR.truncate(TruncBits).signExtend(NewBits).contains( |
Nick Lewycky | d4192f7 | 2011-01-23 20:06:05 +0000 | [diff] [blame] | 1613 | CR.sextOrTrunc(NewBits))) |
| 1614 | return getTruncateOrSignExtend(X, Ty); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1615 | } |
| 1616 | |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1617 | // sext(C1 + (C2 * x)) --> C1 + sext(C2 * x) if C1 < C2 |
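// Editorial note: the guards below additionally require C1 and C2 to be
// strictly positive and C2 to be a power of two. For example, with C1 = 1
// and C2 = 4 the low two bits of C2 * x are zero, so adding 1 cannot carry
// into the sign bit and the sign extension distributes:
// sext(4*x + 1) == sext(4*x) + 1.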
| 1618 | if (auto SA = dyn_cast<SCEVAddExpr>(Op)) { |
| 1619 | if (SA->getNumOperands() == 2) { |
| 1620 | auto SC1 = dyn_cast<SCEVConstant>(SA->getOperand(0)); |
| 1621 | auto SMul = dyn_cast<SCEVMulExpr>(SA->getOperand(1)); |
| 1622 | if (SMul && SC1) { |
| 1623 | if (auto SC2 = dyn_cast<SCEVConstant>(SMul->getOperand(0))) { |
Michael Zolotukhin | 265dfa4 | 2014-05-26 14:49:46 +0000 | [diff] [blame] | 1624 | const APInt &C1 = SC1->getValue()->getValue(); |
| 1625 | const APInt &C2 = SC2->getValue()->getValue(); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1626 | if (C1.isStrictlyPositive() && C2.isStrictlyPositive() && |
Michael Zolotukhin | 265dfa4 | 2014-05-26 14:49:46 +0000 | [diff] [blame] | 1627 | C2.ugt(C1) && C2.isPowerOf2()) |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1628 | return getAddExpr(getSignExtendExpr(SC1, Ty), |
| 1629 | getSignExtendExpr(SMul, Ty)); |
| 1630 | } |
| 1631 | } |
| 1632 | } |
| 1633 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1634 | // If the input value is a chrec scev, and we can prove that the value |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1635 | // did not overflow the old, smaller, value, we can sign extend all of the |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1636 | // operands (often constants). This allows analysis of something like |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1637 | // this: for (signed char X = 0; X < 100; ++X) { int Y = X; } |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1638 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1639 | if (AR->isAffine()) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1640 | const SCEV *Start = AR->getStart(); |
| 1641 | const SCEV *Step = AR->getStepRecurrence(*this); |
| 1642 | unsigned BitWidth = getTypeSizeInBits(AR->getType()); |
| 1643 | const Loop *L = AR->getLoop(); |
| 1644 | |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1645 | // If we have special knowledge that this addrec won't overflow, |
| 1646 | // we don't need to do any further analysis. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1647 | if (AR->getNoWrapFlags(SCEV::FlagNSW)) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1648 | return getAddRecExpr( |
| 1649 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this), |
| 1650 | getSignExtendExpr(Step, Ty), L, SCEV::FlagNSW); |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1651 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1652 | // Check whether the backedge-taken count is SCEVCouldNotCompute. |
| 1653 | // Note that this serves two purposes: It filters out loops that are |
| 1654 | // simply not analyzable, and it covers the case where this code is |
| 1655 | // being called from within backedge-taken count analysis, such that |
| 1656 | // attempting to ask for the backedge-taken count would likely result |
| 1657 | // in infinite recursion. In the latter case, the analysis code will
| 1658 | // cope with a conservative value, and it will take care to purge |
| 1659 | // that value once it has finished. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1660 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(L); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1661 | if (!isa<SCEVCouldNotCompute>(MaxBECount)) { |
Dan Gohman | 95c5b0e | 2009-04-29 01:54:20 +0000 | [diff] [blame] | 1662 | // Manually compute the final value for AR, checking for |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1663 | // overflow. |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1664 | |
| 1665 | // Check whether the backedge-taken count can be losslessly casted to |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1666 | // the addrec's type. The count is always unsigned. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1667 | const SCEV *CastedMaxBECount = |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1668 | getTruncateOrZeroExtend(MaxBECount, Start->getType()); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1669 | const SCEV *RecastedMaxBECount = |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1670 | getTruncateOrZeroExtend(CastedMaxBECount, MaxBECount->getType()); |
| 1671 | if (MaxBECount == RecastedMaxBECount) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1672 | Type *WideTy = IntegerType::get(getContext(), BitWidth * 2); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1673 | // Check whether Start+Step*MaxBECount has no signed overflow. |
Dan Gohman | 007f504 | 2010-02-24 19:31:06 +0000 | [diff] [blame] | 1674 | const SCEV *SMul = getMulExpr(CastedMaxBECount, Step); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1675 | const SCEV *SAdd = getSignExtendExpr(getAddExpr(Start, SMul), WideTy); |
| 1676 | const SCEV *WideStart = getSignExtendExpr(Start, WideTy); |
| 1677 | const SCEV *WideMaxBECount = |
| 1678 | getZeroExtendExpr(CastedMaxBECount, WideTy); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1679 | const SCEV *OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1680 | getAddExpr(WideStart, |
| 1681 | getMulExpr(WideMaxBECount, |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1682 | getSignExtendExpr(Step, WideTy))); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1683 | if (SAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1684 | // Cache knowledge of AR NSW, which is propagated to this AddRec. |
| 1685 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1686 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1687 | return getAddRecExpr( |
| 1688 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this), |
| 1689 | getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1690 | } |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1691 | // Similar to above, only this time treat the step value as unsigned. |
| 1692 | // This covers loops that count up with an unsigned step. |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1693 | OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1694 | getAddExpr(WideStart, |
| 1695 | getMulExpr(WideMaxBECount, |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1696 | getZeroExtendExpr(Step, WideTy))); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1697 | if (SAdd == OperandExtendedAdd) { |
Sanjoy Das | bf5d870 | 2015-02-09 18:34:55 +0000 | [diff] [blame] | 1698 | // If AR wraps around then |
| 1699 | // |
| 1700 | // abs(Step) * MaxBECount > unsigned-max(AR->getType()) |
| 1701 | // => SAdd != OperandExtendedAdd |
| 1702 | // |
| 1703 | // Thus (AR is not NW => SAdd != OperandExtendedAdd) <=> |
| 1704 | // (SAdd == OperandExtendedAdd => AR is NW) |
| 1705 | |
| 1706 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
| 1707 | |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1708 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1709 | return getAddRecExpr( |
| 1710 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this), |
| 1711 | getZeroExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1712 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1713 | } |
| 1714 | |
| 1715 | // If the backedge is guarded by a comparison with the pre-inc value,
| 1716 | // the addrec is safe. Also, if the entry is guarded by a comparison |
| 1717 | // with the start value and the backedge is guarded by a comparison |
| 1718 | // with the post-inc value, the addrec is safe. |
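        // getSignedOverflowLimitForStep chooses Pred and OverflowLimit so that
        // "AR Pred OverflowLimit" holding whenever the backedge is taken rules
        // out signed overflow of the increment.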
Andrew Trick | 812276e | 2011-05-31 21:17:47 +0000 | [diff] [blame] | 1719 | ICmpInst::Predicate Pred; |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1720 | const SCEV *OverflowLimit = |
| 1721 | getSignedOverflowLimitForStep(Step, &Pred, this); |
Andrew Trick | 812276e | 2011-05-31 21:17:47 +0000 | [diff] [blame] | 1722 | if (OverflowLimit && |
| 1723 | (isLoopBackedgeGuardedByCond(L, Pred, AR, OverflowLimit) || |
| 1724 | (isLoopEntryGuardedByCond(L, Pred, Start, OverflowLimit) && |
| 1725 | isLoopBackedgeGuardedByCond(L, Pred, AR->getPostIncExpr(*this), |
| 1726 | OverflowLimit)))) { |
| 1727 | // Cache knowledge of AR NSW, then propagate NSW to the wide AddRec. |
| 1728 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1729 | return getAddRecExpr( |
| 1730 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this), |
| 1731 | getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1732 | } |
| 1733 | } |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1734 | // If Start and Step are constants, check if we can apply this |
| 1735 | // transformation: |
| 1736 | // sext{C1,+,C2} --> C1 + sext{0,+,C2} if C1 < C2 |
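      // For instance, with C1 = 1 and C2 = 4 (both positive, C2 > C1, and C2 a
      // power of two), sext{1,+,4} becomes 1 + sext{0,+,4}.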
| 1737 | auto SC1 = dyn_cast<SCEVConstant>(Start); |
| 1738 | auto SC2 = dyn_cast<SCEVConstant>(Step); |
| 1739 | if (SC1 && SC2) { |
Michael Zolotukhin | 265dfa4 | 2014-05-26 14:49:46 +0000 | [diff] [blame] | 1740 | const APInt &C1 = SC1->getValue()->getValue(); |
| 1741 | const APInt &C2 = SC2->getValue()->getValue(); |
| 1742 | if (C1.isStrictlyPositive() && C2.isStrictlyPositive() && C2.ugt(C1) && |
| 1743 | C2.isPowerOf2()) { |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1744 | Start = getSignExtendExpr(Start, Ty); |
| 1745 | const SCEV *NewAR = getAddRecExpr(getConstant(AR->getType(), 0), Step, |
| 1746 | L, AR->getNoWrapFlags()); |
| 1747 | return getAddExpr(Start, getSignExtendExpr(NewAR, Ty)); |
| 1748 | } |
| 1749 | } |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1750 | |
| 1751 | if (proveNoWrapByVaryingStart<SCEVSignExtendExpr>(Start, Step, L)) { |
| 1752 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
| 1753 | return getAddRecExpr( |
| 1754 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this), |
| 1755 | getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags()); |
| 1756 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1757 | } |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1758 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1759 | // The cast wasn't folded; create an explicit cast node. |
| 1760 | // Recompute the insert position, as it may have been invalidated. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1761 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 1762 | SCEV *S = new (SCEVAllocator) SCEVSignExtendExpr(ID.Intern(SCEVAllocator), |
| 1763 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1764 | UniqueSCEVs.InsertNode(S, IP); |
| 1765 | return S; |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1766 | } |
| 1767 | |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1768 | /// getAnyExtendExpr - Return a SCEV for the given operand extended with |
| 1769 | /// unspecified bits out to the given type. |
| 1770 | /// |
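/// The kind of extension is left unspecified; the implementation uses
/// whichever of zero- or sign-extension happens to fold away, falling back
/// to zero-extension absent any other information.
///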
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1771 | const SCEV *ScalarEvolution::getAnyExtendExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1772 | Type *Ty) { |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1773 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
| 1774 | "This is not an extending conversion!"); |
| 1775 | assert(isSCEVable(Ty) && |
| 1776 | "This is not a conversion to a SCEVable type!"); |
| 1777 | Ty = getEffectiveSCEVType(Ty); |
| 1778 | |
| 1779 | // Sign-extend negative constants. |
| 1780 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
| 1781 | if (SC->getValue()->getValue().isNegative()) |
| 1782 | return getSignExtendExpr(Op, Ty); |
| 1783 | |
| 1784 | // Peel off a truncate cast. |
| 1785 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(Op)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1786 | const SCEV *NewOp = T->getOperand(); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1787 | if (getTypeSizeInBits(NewOp->getType()) < getTypeSizeInBits(Ty)) |
| 1788 | return getAnyExtendExpr(NewOp, Ty); |
| 1789 | return getTruncateOrNoop(NewOp, Ty); |
| 1790 | } |
| 1791 | |
| 1792 | // Next try a zext cast. If the cast is folded, use it. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1793 | const SCEV *ZExt = getZeroExtendExpr(Op, Ty); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1794 | if (!isa<SCEVZeroExtendExpr>(ZExt)) |
| 1795 | return ZExt; |
| 1796 | |
| 1797 | // Next try a sext cast. If the cast is folded, use it. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1798 | const SCEV *SExt = getSignExtendExpr(Op, Ty); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1799 | if (!isa<SCEVSignExtendExpr>(SExt)) |
| 1800 | return SExt; |
| 1801 | |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 1802 | // Force the cast to be folded into the operands of an addrec. |
| 1803 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) { |
| 1804 | SmallVector<const SCEV *, 4> Ops; |
Tobias Grosser | 924221c | 2014-05-07 06:07:47 +0000 | [diff] [blame] | 1805 | for (const SCEV *Op : AR->operands()) |
| 1806 | Ops.push_back(getAnyExtendExpr(Op, Ty)); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1807 | return getAddRecExpr(Ops, AR->getLoop(), SCEV::FlagNW); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 1808 | } |
| 1809 | |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 1810 | // If the expression is obviously signed, use the sext cast value. |
| 1811 | if (isa<SCEVSMaxExpr>(Op)) |
| 1812 | return SExt; |
| 1813 | |
| 1814 | // Absent any other information, use the zext cast value. |
| 1815 | return ZExt; |
| 1816 | } |
| 1817 | |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1818 | /// CollectAddOperandsWithScales - Process the given Ops list, which is |
| 1819 | /// a list of operands to be added under the given scale, and update the
| 1820 | /// given map. This is a helper function for getAddExpr. As an example of
| 1821 | /// what it does, given a sequence of operands that would form an add |
| 1822 | /// expression like this: |
| 1823 | /// |
Tobias Grosser | ba49e42 | 2014-03-05 10:37:17 +0000 | [diff] [blame] | 1824 | /// m + n + 13 + (A * (o + p + (B * (q + m + 29)))) + r + (-1 * r) |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1825 | /// |
| 1826 | /// where A and B are constants, update the map with these values: |
| 1827 | /// |
| 1828 | /// (m, 1+A*B), (n, 1), (o, A), (p, A), (q, A*B), (r, 0) |
| 1829 | /// |
| 1830 | /// and add 13 + A*B*29 to AccumulatedConstant. |
| 1831 | /// This will allow getAddExpr to produce this:
| 1832 | /// |
| 1833 | /// 13+A*B*29 + n + (m * (1+A*B)) + ((o + p) * A) + (q * A*B) |
| 1834 | /// |
| 1835 | /// This form often exposes folding opportunities that are hidden in |
| 1836 | /// the original operand list. |
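///
/// For instance, with A = 2 and B = 3 the map is
///
///    (m, 7), (n, 1), (o, 2), (p, 2), (q, 6), (r, 0)
///
/// with 13 + 6*29 = 187 added to AccumulatedConstant, regenerating
///
///    187 + n + (m * 7) + ((o + p) * 2) + (q * 6)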
| 1837 | /// |
Sylvestre Ledru | 91ce36c | 2012-09-27 10:14:43 +0000 | [diff] [blame] | 1838 | /// Return true iff it appears that any interesting folding opportunities |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1839 | /// may be exposed. This helps getAddExpr short-circuit extra work in
| 1840 | /// the common case where no interesting opportunities are present, and |
| 1841 | /// is also used as a check to avoid infinite recursion. |
| 1842 | /// |
| 1843 | static bool |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1844 | CollectAddOperandsWithScales(DenseMap<const SCEV *, APInt> &M, |
Craig Topper | 2cd5ff8 | 2013-07-11 16:22:38 +0000 | [diff] [blame] | 1845 | SmallVectorImpl<const SCEV *> &NewOps, |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1846 | APInt &AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 1847 | const SCEV *const *Ops, size_t NumOperands, |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1848 | const APInt &Scale, |
| 1849 | ScalarEvolution &SE) { |
| 1850 | bool Interesting = false; |
| 1851 | |
Dan Gohman | 4507304 | 2010-06-18 19:12:32 +0000 | [diff] [blame] | 1852 | // Iterate over the add operands. They are sorted, with constants first. |
| 1853 | unsigned i = 0; |
| 1854 | while (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[i])) { |
| 1855 | ++i; |
| 1856 | // Pull a buried constant out to the outside. |
| 1857 | if (Scale != 1 || AccumulatedConstant != 0 || C->getValue()->isZero()) |
| 1858 | Interesting = true; |
| 1859 | AccumulatedConstant += Scale * C->getValue()->getValue(); |
| 1860 | } |
| 1861 | |
| 1862 | // Next comes everything else. We're especially interested in multiplies |
| 1863 | // here, but they're in the middle, so just visit the rest with one loop. |
| 1864 | for (; i != NumOperands; ++i) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1865 | const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Ops[i]); |
| 1866 | if (Mul && isa<SCEVConstant>(Mul->getOperand(0))) { |
| 1867 | APInt NewScale = |
| 1868 | Scale * cast<SCEVConstant>(Mul->getOperand(0))->getValue()->getValue(); |
| 1869 | if (Mul->getNumOperands() == 2 && isa<SCEVAddExpr>(Mul->getOperand(1))) { |
| 1870 | // A multiplication of a constant with another add; recurse. |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 1871 | const SCEVAddExpr *Add = cast<SCEVAddExpr>(Mul->getOperand(1)); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1872 | Interesting |= |
| 1873 | CollectAddOperandsWithScales(M, NewOps, AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 1874 | Add->op_begin(), Add->getNumOperands(), |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1875 | NewScale, SE); |
| 1876 | } else { |
| 1877 | // A multiplication of a constant with some other value. Update |
| 1878 | // the map. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1879 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin()+1, Mul->op_end()); |
| 1880 | const SCEV *Key = SE.getMulExpr(MulOps); |
| 1881 | std::pair<DenseMap<const SCEV *, APInt>::iterator, bool> Pair = |
Dan Gohman | e00beaa | 2009-06-29 18:25:52 +0000 | [diff] [blame] | 1882 | M.insert(std::make_pair(Key, NewScale)); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1883 | if (Pair.second) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1884 | NewOps.push_back(Pair.first->first); |
| 1885 | } else { |
| 1886 | Pair.first->second += NewScale; |
| 1887 | // The map already had an entry for this value, which may indicate |
| 1888 | // a folding opportunity. |
| 1889 | Interesting = true; |
| 1890 | } |
| 1891 | } |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1892 | } else { |
| 1893 | // An ordinary operand. Update the map. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1894 | std::pair<DenseMap<const SCEV *, APInt>::iterator, bool> Pair = |
Dan Gohman | e00beaa | 2009-06-29 18:25:52 +0000 | [diff] [blame] | 1895 | M.insert(std::make_pair(Ops[i], Scale)); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1896 | if (Pair.second) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 1897 | NewOps.push_back(Pair.first->first); |
| 1898 | } else { |
| 1899 | Pair.first->second += Scale; |
| 1900 | // The map already had an entry for this value, which may indicate |
| 1901 | // a folding opportunity. |
| 1902 | Interesting = true; |
| 1903 | } |
| 1904 | } |
| 1905 | } |
| 1906 | |
| 1907 | return Interesting; |
| 1908 | } |
| 1909 | |
| 1910 | namespace { |
| 1911 | struct APIntCompare { |
| 1912 | bool operator()(const APInt &LHS, const APInt &RHS) const { |
| 1913 | return LHS.ult(RHS); |
| 1914 | } |
| 1915 | }; |
| 1916 | } |
| 1917 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 1918 | // We're trying to construct a SCEV of type `Type' with `Ops' as operands and |
| 1919 | // `OldFlags' as can't-wrap behavior. Infer a more aggressive set of |
| 1920 | // can't-overflow flags for the operation if possible. |
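//
// For example, if an add is already known <nsw> and every operand is known
// non-negative, the signed and unsigned interpretations of the sum coincide
// and stay within [0, SIGNED_MAX], so the add cannot wrap in the unsigned
// sense either and <nuw> can be inferred.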
| 1921 | static SCEV::NoWrapFlags |
| 1922 | StrengthenNoWrapFlags(ScalarEvolution *SE, SCEVTypes Type, |
| 1923 | const SmallVectorImpl<const SCEV *> &Ops, |
| 1924 | SCEV::NoWrapFlags OldFlags) { |
| 1925 | using namespace std::placeholders; |
| 1926 | |
| 1927 | bool CanAnalyze = |
| 1928 | Type == scAddExpr || Type == scAddRecExpr || Type == scMulExpr; |
| 1929 | (void)CanAnalyze; |
| 1930 | assert(CanAnalyze && "don't call from other places!"); |
| 1931 | |
| 1932 | int SignOrUnsignMask = SCEV::FlagNUW | SCEV::FlagNSW; |
| 1933 | SCEV::NoWrapFlags SignOrUnsignWrap = |
| 1934 | ScalarEvolution::maskFlags(OldFlags, SignOrUnsignMask); |
| 1935 | |
| 1936 | // If FlagNSW is true and all the operands are non-negative, infer FlagNUW. |
| 1937 | auto IsKnownNonNegative = |
| 1938 | std::bind(std::mem_fn(&ScalarEvolution::isKnownNonNegative), SE, _1); |
| 1939 | |
| 1940 | if (SignOrUnsignWrap == SCEV::FlagNSW && |
| 1941 | std::all_of(Ops.begin(), Ops.end(), IsKnownNonNegative)) |
| 1942 | return ScalarEvolution::setFlags(OldFlags, |
| 1943 | (SCEV::NoWrapFlags)SignOrUnsignMask); |
| 1944 | |
| 1945 | return OldFlags; |
| 1946 | } |
| 1947 | |
Dan Gohman | 4d5435d | 2009-05-24 23:45:28 +0000 | [diff] [blame] | 1948 | /// getAddExpr - Get a canonical add expression, or something simpler if |
| 1949 | /// possible. |
Dan Gohman | 816fe0a | 2009-10-09 00:10:36 +0000 | [diff] [blame] | 1950 | const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1951 | SCEV::NoWrapFlags Flags) { |
| 1952 | assert(!(Flags & ~(SCEV::FlagNUW | SCEV::FlagNSW)) && |
| 1953 | "only nuw or nsw allowed"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1954 | assert(!Ops.empty() && "Cannot get empty add!"); |
Chris Lattner | 74498e1 | 2004-04-07 16:16:11 +0000 | [diff] [blame] | 1955 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 1956 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1957 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 1958 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | 9136d9f | 2010-06-18 19:09:27 +0000 | [diff] [blame] | 1959 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 1960 | "SCEVAddExpr operand types don't match!"); |
| 1961 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1962 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 1963 | Flags = StrengthenNoWrapFlags(this, scAddExpr, Ops, Flags); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 1964 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1965 | // Sort by complexity; this groups all similar expression types together.
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 1966 | GroupByComplexity(Ops, &LI); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1967 | |
| 1968 | // If there are any constants, fold them together. |
| 1969 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1970 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1971 | ++Idx; |
Chris Lattner | 74498e1 | 2004-04-07 16:16:11 +0000 | [diff] [blame] | 1972 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1973 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1974 | // We found two constants, fold them together! |
Dan Gohman | 0652fd5 | 2009-06-14 22:47:23 +0000 | [diff] [blame] | 1975 | Ops[0] = getConstant(LHSC->getValue()->getValue() + |
| 1976 | RHSC->getValue()->getValue()); |
Dan Gohman | 011cf68 | 2009-06-14 22:53:57 +0000 | [diff] [blame] | 1977 | if (Ops.size() == 2) return Ops[0]; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 1978 | Ops.erase(Ops.begin()+1); // Erase the folded element |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 1979 | LHSC = cast<SCEVConstant>(Ops[0]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1980 | } |
| 1981 | |
| 1982 | // If we are left with a constant zero being added, strip it off. |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 1983 | if (LHSC->getValue()->isZero()) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1984 | Ops.erase(Ops.begin()); |
| 1985 | --Idx; |
| 1986 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1987 | |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 1988 | if (Ops.size() == 1) return Ops[0]; |
| 1989 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 1990 | |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 1991 | // Okay, check to see if the same value occurs in the operand list more than |
| 1992 | // once. If so, merge them together into a multiply expression. Since we
| 1993 | // sorted the list, these values are required to be adjacent. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1994 | Type *Ty = Ops[0]->getType(); |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 1995 | bool FoundMatch = false; |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 1996 | for (unsigned i = 0, e = Ops.size(); i != e-1; ++i) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1997 | if (Ops[i] == Ops[i+1]) { // X + Y + Y --> X + Y*2 |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 1998 | // Scan ahead to count how many equal operands there are. |
| 1999 | unsigned Count = 2; |
| 2000 | while (i+Count != e && Ops[i+Count] == Ops[i]) |
| 2001 | ++Count; |
| 2002 | // Merge the values into a multiply. |
| 2003 | const SCEV *Scale = getConstant(Ty, Count); |
| 2004 | const SCEV *Mul = getMulExpr(Scale, Ops[i]); |
| 2005 | if (Ops.size() == Count) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2006 | return Mul; |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2007 | Ops[i] = Mul; |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2008 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+Count); |
Dan Gohman | fe22f1d | 2010-08-28 00:39:27 +0000 | [diff] [blame] | 2009 | --i; e -= Count - 1; |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2010 | FoundMatch = true; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2011 | } |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2012 | if (FoundMatch) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2013 | return getAddExpr(Ops, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2014 | |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2015 | // Check for truncates. If all the operands are truncated from the same |
| 2016 | // type, see if factoring out the truncate would permit the result to be |
| 2017 | // folded. e.g., trunc(x) + m*trunc(n) --> trunc(x + trunc(m)*n)
| 2018 | // if the contents of the resulting outer trunc fold to something simple. |
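  // For example, if every operand is trunc(x_i), a constant, or a product of
  // such terms (all truncated from one source type), the whole sum can be
  // re-evaluated in that source type; the result is kept only when the wider
  // sum folds to a constant or a single unknown, so one outer trunc suffices.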
| 2019 | for (; Idx < Ops.size() && isa<SCEVTruncateExpr>(Ops[Idx]); ++Idx) { |
| 2020 | const SCEVTruncateExpr *Trunc = cast<SCEVTruncateExpr>(Ops[Idx]); |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2021 | Type *DstType = Trunc->getType(); |
| 2022 | Type *SrcType = Trunc->getOperand()->getType(); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2023 | SmallVector<const SCEV *, 8> LargeOps; |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2024 | bool Ok = true; |
| 2025 | // Check all the operands to see if they can be represented in the |
| 2026 | // source type of the truncate. |
| 2027 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) { |
| 2028 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(Ops[i])) { |
| 2029 | if (T->getOperand()->getType() != SrcType) { |
| 2030 | Ok = false; |
| 2031 | break; |
| 2032 | } |
| 2033 | LargeOps.push_back(T->getOperand()); |
| 2034 | } else if (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[i])) { |
Dan Gohman | ff3174e | 2010-04-23 01:51:29 +0000 | [diff] [blame] | 2035 | LargeOps.push_back(getAnyExtendExpr(C, SrcType)); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2036 | } else if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(Ops[i])) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2037 | SmallVector<const SCEV *, 8> LargeMulOps; |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2038 | for (unsigned j = 0, f = M->getNumOperands(); j != f && Ok; ++j) { |
| 2039 | if (const SCEVTruncateExpr *T = |
| 2040 | dyn_cast<SCEVTruncateExpr>(M->getOperand(j))) { |
| 2041 | if (T->getOperand()->getType() != SrcType) { |
| 2042 | Ok = false; |
| 2043 | break; |
| 2044 | } |
| 2045 | LargeMulOps.push_back(T->getOperand()); |
| 2046 | } else if (const SCEVConstant *C = |
| 2047 | dyn_cast<SCEVConstant>(M->getOperand(j))) { |
Dan Gohman | ff3174e | 2010-04-23 01:51:29 +0000 | [diff] [blame] | 2048 | LargeMulOps.push_back(getAnyExtendExpr(C, SrcType)); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2049 | } else { |
| 2050 | Ok = false; |
| 2051 | break; |
| 2052 | } |
| 2053 | } |
| 2054 | if (Ok) |
| 2055 | LargeOps.push_back(getMulExpr(LargeMulOps)); |
| 2056 | } else { |
| 2057 | Ok = false; |
| 2058 | break; |
| 2059 | } |
| 2060 | } |
| 2061 | if (Ok) { |
| 2062 | // Evaluate the expression in the larger type. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2063 | const SCEV *Fold = getAddExpr(LargeOps, Flags); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2064 | // If it folds to something simple, use it. Otherwise, don't. |
| 2065 | if (isa<SCEVConstant>(Fold) || isa<SCEVUnknown>(Fold)) |
| 2066 | return getTruncateExpr(Fold, DstType); |
| 2067 | } |
| 2068 | } |
| 2069 | |
| 2070 | // Skip past any other cast SCEVs. |
Dan Gohman | eed125f | 2007-06-18 19:30:09 +0000 | [diff] [blame] | 2071 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddExpr) |
| 2072 | ++Idx; |
| 2073 | |
| 2074 | // If there are add operands, they would be next.
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2075 | if (Idx < Ops.size()) { |
| 2076 | bool DeletedAdd = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2077 | while (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2078 | // If we have an add, expand the add operands onto the end of the operands |
| 2079 | // list. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2080 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 2081 | Ops.append(Add->op_begin(), Add->op_end()); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2082 | DeletedAdd = true; |
| 2083 | } |
| 2084 | |
| 2085 | // If we deleted at least one add, we added operands to the end of the list, |
| 2086 | // and they are not necessarily sorted. Recurse to resort and resimplify |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 2087 | // any operands we just acquired. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2088 | if (DeletedAdd) |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2089 | return getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2090 | } |
| 2091 | |
| 2092 | // Skip over the add expression until we get to a multiply. |
| 2093 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scMulExpr) |
| 2094 | ++Idx; |
| 2095 | |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2096 | // Check to see if there are any folding opportunities present with |
| 2097 | // operands multiplied by constant values. |
| 2098 | if (Idx < Ops.size() && isa<SCEVMulExpr>(Ops[Idx])) { |
| 2099 | uint64_t BitWidth = getTypeSizeInBits(Ty); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2100 | DenseMap<const SCEV *, APInt> M; |
| 2101 | SmallVector<const SCEV *, 8> NewOps; |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2102 | APInt AccumulatedConstant(BitWidth, 0); |
| 2103 | if (CollectAddOperandsWithScales(M, NewOps, AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2104 | Ops.data(), Ops.size(), |
| 2105 | APInt(BitWidth, 1), *this)) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2106 | // Some interesting folding opportunity is present, so it's worthwhile to
| 2107 | // re-generate the operands list. Group the operands by constant scale, |
| 2108 | // to avoid multiplying by the same constant scale multiple times. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2109 | std::map<APInt, SmallVector<const SCEV *, 4>, APIntCompare> MulOpLists; |
Craig Topper | 31ee586 | 2013-07-03 15:07:05 +0000 | [diff] [blame] | 2110 | for (SmallVectorImpl<const SCEV *>::const_iterator I = NewOps.begin(), |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2111 | E = NewOps.end(); I != E; ++I) |
| 2112 | MulOpLists[M.find(*I)->second].push_back(*I); |
| 2113 | // Re-generate the operands list. |
| 2114 | Ops.clear(); |
| 2115 | if (AccumulatedConstant != 0) |
| 2116 | Ops.push_back(getConstant(AccumulatedConstant)); |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2117 | for (std::map<APInt, SmallVector<const SCEV *, 4>, APIntCompare>::iterator |
| 2118 | I = MulOpLists.begin(), E = MulOpLists.end(); I != E; ++I) |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2119 | if (I->first != 0) |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2120 | Ops.push_back(getMulExpr(getConstant(I->first), |
| 2121 | getAddExpr(I->second))); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2122 | if (Ops.empty()) |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 2123 | return getConstant(Ty, 0); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2124 | if (Ops.size() == 1) |
| 2125 | return Ops[0]; |
| 2126 | return getAddExpr(Ops); |
| 2127 | } |
| 2128 | } |
| 2129 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2130 | // If we are adding something to a multiply expression, make sure the |
| 2131 | // something is not already an operand of the multiply. If so, merge it into |
| 2132 | // the multiply. |
| 2133 | for (; Idx < Ops.size() && isa<SCEVMulExpr>(Ops[Idx]); ++Idx) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2134 | const SCEVMulExpr *Mul = cast<SCEVMulExpr>(Ops[Idx]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2135 | for (unsigned MulOp = 0, e = Mul->getNumOperands(); MulOp != e; ++MulOp) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2136 | const SCEV *MulOpSCEV = Mul->getOperand(MulOp); |
Dan Gohman | 157847f | 2010-08-12 14:52:55 +0000 | [diff] [blame] | 2137 | if (isa<SCEVConstant>(MulOpSCEV)) |
| 2138 | continue; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2139 | for (unsigned AddOp = 0, e = Ops.size(); AddOp != e; ++AddOp) |
Dan Gohman | 157847f | 2010-08-12 14:52:55 +0000 | [diff] [blame] | 2140 | if (MulOpSCEV == Ops[AddOp]) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2141 | // Fold W + X + (X * Y * Z) --> W + (X * ((Y*Z)+1)) |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2142 | const SCEV *InnerMul = Mul->getOperand(MulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2143 | if (Mul->getNumOperands() != 2) { |
| 2144 | // If the multiply has more than two operands, we must get the |
| 2145 | // Y*Z term. |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2146 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin(), |
| 2147 | Mul->op_begin()+MulOp); |
| 2148 | MulOps.append(Mul->op_begin()+MulOp+1, Mul->op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2149 | InnerMul = getMulExpr(MulOps); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2150 | } |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 2151 | const SCEV *One = getConstant(Ty, 1); |
Dan Gohman | cf32f2b | 2010-08-13 20:17:14 +0000 | [diff] [blame] | 2152 | const SCEV *AddOne = getAddExpr(One, InnerMul); |
Dan Gohman | 157847f | 2010-08-12 14:52:55 +0000 | [diff] [blame] | 2153 | const SCEV *OuterMul = getMulExpr(AddOne, MulOpSCEV); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2154 | if (Ops.size() == 2) return OuterMul; |
| 2155 | if (AddOp < Idx) { |
| 2156 | Ops.erase(Ops.begin()+AddOp); |
| 2157 | Ops.erase(Ops.begin()+Idx-1); |
| 2158 | } else { |
| 2159 | Ops.erase(Ops.begin()+Idx); |
| 2160 | Ops.erase(Ops.begin()+AddOp-1); |
| 2161 | } |
| 2162 | Ops.push_back(OuterMul); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2163 | return getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2164 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 2165 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2166 | // Check this multiply against other multiplies being added together. |
| 2167 | for (unsigned OtherMulIdx = Idx+1; |
| 2168 | OtherMulIdx < Ops.size() && isa<SCEVMulExpr>(Ops[OtherMulIdx]); |
| 2169 | ++OtherMulIdx) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2170 | const SCEVMulExpr *OtherMul = cast<SCEVMulExpr>(Ops[OtherMulIdx]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2171 | // If MulOp occurs in OtherMul, we can fold the two multiplies |
| 2172 | // together. |
| 2173 | for (unsigned OMulOp = 0, e = OtherMul->getNumOperands(); |
| 2174 | OMulOp != e; ++OMulOp) |
| 2175 | if (OtherMul->getOperand(OMulOp) == MulOpSCEV) { |
| 2176 | // Fold X + (A*B*C) + (A*D*E) --> X + (A*(B*C+D*E)) |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2177 | const SCEV *InnerMul1 = Mul->getOperand(MulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2178 | if (Mul->getNumOperands() != 2) { |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2179 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin(), |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2180 | Mul->op_begin()+MulOp); |
| 2181 | MulOps.append(Mul->op_begin()+MulOp+1, Mul->op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2182 | InnerMul1 = getMulExpr(MulOps); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2183 | } |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2184 | const SCEV *InnerMul2 = OtherMul->getOperand(OMulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2185 | if (OtherMul->getNumOperands() != 2) { |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2186 | SmallVector<const SCEV *, 4> MulOps(OtherMul->op_begin(), |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2187 | OtherMul->op_begin()+OMulOp); |
| 2188 | MulOps.append(OtherMul->op_begin()+OMulOp+1, OtherMul->op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2189 | InnerMul2 = getMulExpr(MulOps); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2190 | } |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2191 | const SCEV *InnerMulSum = getAddExpr(InnerMul1,InnerMul2); |
| 2192 | const SCEV *OuterMul = getMulExpr(MulOpSCEV, InnerMulSum); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2193 | if (Ops.size() == 2) return OuterMul; |
Dan Gohman | aabfc52 | 2010-08-31 22:50:31 +0000 | [diff] [blame] | 2194 | Ops.erase(Ops.begin()+Idx); |
| 2195 | Ops.erase(Ops.begin()+OtherMulIdx-1); |
| 2196 | Ops.push_back(OuterMul); |
| 2197 | return getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2198 | } |
| 2199 | } |
| 2200 | } |
| 2201 | } |
| 2202 | |
| 2203 | // If there are any add recurrences in the operands list, see if any other |
| 2204 | // added values are loop invariant. If so, we can fold them into the |
| 2205 | // recurrence. |
| 2206 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddRecExpr) |
| 2207 | ++Idx; |
| 2208 | |
| 2209 | // Scan over all recurrences, trying to fold loop invariants into them. |
| 2210 | for (; Idx < Ops.size() && isa<SCEVAddRecExpr>(Ops[Idx]); ++Idx) { |
| 2211 | // Scan all of the other operands to this add and add them to the vector if |
| 2212 | // they are loop invariant w.r.t. the recurrence. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2213 | SmallVector<const SCEV *, 8> LIOps; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2214 | const SCEVAddRecExpr *AddRec = cast<SCEVAddRecExpr>(Ops[Idx]); |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 2215 | const Loop *AddRecLoop = AddRec->getLoop(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2216 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 2217 | if (isLoopInvariant(Ops[i], AddRecLoop)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2218 | LIOps.push_back(Ops[i]); |
| 2219 | Ops.erase(Ops.begin()+i); |
| 2220 | --i; --e; |
| 2221 | } |
| 2222 | |
| 2223 | // If we found some loop invariants, fold them into the recurrence. |
| 2224 | if (!LIOps.empty()) { |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 2225 | // NLI + LI + {Start,+,Step} --> NLI + {LI+Start,+,Step} |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2226 | LIOps.push_back(AddRec->getStart()); |
| 2227 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2228 | SmallVector<const SCEV *, 4> AddRecOps(AddRec->op_begin(), |
Dan Gohman | 7a2dab8 | 2009-12-18 03:57:04 +0000 | [diff] [blame] | 2229 | AddRec->op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2230 | AddRecOps[0] = getAddExpr(LIOps); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2231 | |
Dan Gohman | 1620613 | 2010-06-30 07:16:37 +0000 | [diff] [blame] | 2232 | // Build the new addrec. Propagate the NUW and NSW flags if both the |
Eric Christopher | 23bf3ba | 2011-01-11 09:02:09 +0000 | [diff] [blame] | 2233 | // outer add and the inner addrec are guaranteed to have no overflow. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2234 | // Always propagate NW. |
| 2235 | Flags = AddRec->getNoWrapFlags(setFlags(Flags, SCEV::FlagNW)); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2236 | const SCEV *NewRec = getAddRecExpr(AddRecOps, AddRecLoop, Flags); |
Dan Gohman | 51f1305 | 2009-12-18 18:45:31 +0000 | [diff] [blame] | 2237 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2238 | // If all of the other operands were loop invariant, we are done. |
| 2239 | if (Ops.size() == 1) return NewRec; |
| 2240 | |
Nick Lewycky | db66b82 | 2011-09-06 05:08:09 +0000 | [diff] [blame] | 2241 | // Otherwise, add the folded AddRec to the non-invariant parts.
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2242 | for (unsigned i = 0;; ++i) |
| 2243 | if (Ops[i] == AddRec) { |
| 2244 | Ops[i] = NewRec; |
| 2245 | break; |
| 2246 | } |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2247 | return getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2248 | } |
| 2249 | |
| 2250 | // Okay, if there weren't any loop invariants to be folded, check to see if |
| 2251 | // there are multiple AddRec's with the same loop induction variable being |
| 2252 | // added together. If so, we can fold them. |
| 2253 | for (unsigned OtherIdx = Idx+1; |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2254 | OtherIdx < Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
| 2255 | ++OtherIdx) |
| 2256 | if (AddRecLoop == cast<SCEVAddRecExpr>(Ops[OtherIdx])->getLoop()) { |
| 2257 | // Other + {A,+,B}<L> + {C,+,D}<L> --> Other + {A+C,+,B+D}<L> |
| 2258 | SmallVector<const SCEV *, 4> AddRecOps(AddRec->op_begin(), |
| 2259 | AddRec->op_end()); |
| 2260 | for (; OtherIdx != Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
| 2261 | ++OtherIdx) |
Dan Gohman | 028c181 | 2010-08-29 14:53:34 +0000 | [diff] [blame] | 2262 | if (const SCEVAddRecExpr *OtherAddRec = |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2263 | dyn_cast<SCEVAddRecExpr>(Ops[OtherIdx])) |
Dan Gohman | 028c181 | 2010-08-29 14:53:34 +0000 | [diff] [blame] | 2264 | if (OtherAddRec->getLoop() == AddRecLoop) { |
| 2265 | for (unsigned i = 0, e = OtherAddRec->getNumOperands(); |
| 2266 | i != e; ++i) { |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2267 | if (i >= AddRecOps.size()) { |
Dan Gohman | 028c181 | 2010-08-29 14:53:34 +0000 | [diff] [blame] | 2268 | AddRecOps.append(OtherAddRec->op_begin()+i, |
| 2269 | OtherAddRec->op_end()); |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2270 | break; |
| 2271 | } |
Dan Gohman | 028c181 | 2010-08-29 14:53:34 +0000 | [diff] [blame] | 2272 | AddRecOps[i] = getAddExpr(AddRecOps[i], |
| 2273 | OtherAddRec->getOperand(i)); |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2274 | } |
| 2275 | Ops.erase(Ops.begin() + OtherIdx); --OtherIdx; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2276 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2277 | // Step size has changed, so we cannot guarantee no self-wraparound. |
| 2278 | Ops[Idx] = getAddRecExpr(AddRecOps, AddRecLoop, SCEV::FlagAnyWrap); |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2279 | return getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2280 | } |
| 2281 | |
| 2282 | // Otherwise couldn't fold anything into this recurrence. Move onto the |
| 2283 | // next one. |
| 2284 | } |
| 2285 | |
| 2286 | // Okay, it looks like we really DO need an add expr. Check to see if we |
| 2287 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2288 | FoldingSetNodeID ID; |
| 2289 | ID.AddInteger(scAddExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2290 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 2291 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2292 | void *IP = nullptr; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2293 | SCEVAddExpr *S = |
| 2294 | static_cast<SCEVAddExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 2295 | if (!S) { |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2296 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 2297 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 2298 | S = new (SCEVAllocator) SCEVAddExpr(ID.Intern(SCEVAllocator), |
| 2299 | O, Ops.size()); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2300 | UniqueSCEVs.InsertNode(S, IP); |
| 2301 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2302 | S->setNoWrapFlags(Flags); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2303 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2304 | } |
| 2305 | |
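/// umul_ov - Multiply two unsigned 64-bit values, setting Overflow if the
/// product wraps. Wrapping is detected by checking whether dividing the
/// truncated product by one factor recovers the other.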
Nick Lewycky | 287682e | 2011-10-04 06:51:26 +0000 | [diff] [blame] | 2306 | static uint64_t umul_ov(uint64_t i, uint64_t j, bool &Overflow) { |
| 2307 | uint64_t k = i*j; |
| 2308 | if (j > 1 && k / j != i) Overflow = true; |
| 2309 | return k; |
| 2310 | } |
| 2311 | |
| 2312 | /// Compute the result of "n choose k", the binomial coefficient. If an |
| 2313 | /// intermediate computation overflows, Overflow will be set and the return will |
Benjamin Kramer | bde9176 | 2012-06-02 10:20:22 +0000 | [diff] [blame] | 2314 | /// be garbage. Overflow is not cleared on absence of overflow. |
Nick Lewycky | 287682e | 2011-10-04 06:51:26 +0000 | [diff] [blame] | 2315 | static uint64_t Choose(uint64_t n, uint64_t k, bool &Overflow) { |
| 2316 | // We use the multiplicative formula: |
| 2317 | // n(n-1)(n-2)...(n-(k-1)) / k(k-1)(k-2)...1 . |
| 2318 | // At iteration i, we multiply the running result by the i-th term of the
| 2319 | // numerator, n-(i-1), and then divide by i. The division is always exact,
| 2320 | // since a product of i consecutive integers is divisible by i!, and dividing
| 2321 | // at each step helps reduce the chance of overflow in the intermediate
| 2322 | // computations. However, we can still overflow even when the final result would fit.
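//
// For example, Choose(6, 2) runs two iterations: r = (1*6)/1 = 6, then
// r = (6*5)/2 = 15.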
| 2323 | |
| 2324 | if (n == 0 || n == k) return 1; |
| 2325 | if (k > n) return 0; |
| 2326 | |
| 2327 | if (k > n/2) |
| 2328 | k = n-k; |
| 2329 | |
| 2330 | uint64_t r = 1; |
| 2331 | for (uint64_t i = 1; i <= k; ++i) { |
| 2332 | r = umul_ov(r, n-(i-1), Overflow); |
| 2333 | r /= i; |
| 2334 | } |
| 2335 | return r; |
| 2336 | } |
| 2337 | |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2338 | /// Determine if any of the operands in this SCEV are a constant or if |
| 2339 | /// any of the add or multiply expressions in this SCEV contain a constant. |
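/// This is used below by getMulExpr to decide whether distributing a constant
/// multiplier over a two-operand add is likely to expose further folding.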
| 2340 | static bool containsConstantSomewhere(const SCEV *StartExpr) { |
| 2341 | SmallVector<const SCEV *, 4> Ops; |
| 2342 | Ops.push_back(StartExpr); |
| 2343 | while (!Ops.empty()) { |
| 2344 | const SCEV *CurrentExpr = Ops.pop_back_val(); |
| 2345 | if (isa<SCEVConstant>(*CurrentExpr)) |
| 2346 | return true; |
| 2347 | |
| 2348 | if (isa<SCEVAddExpr>(*CurrentExpr) || isa<SCEVMulExpr>(*CurrentExpr)) { |
| 2349 | const auto *CurrentNAry = cast<SCEVNAryExpr>(CurrentExpr); |
Benjamin Kramer | 6cd780f | 2015-02-17 15:29:18 +0000 | [diff] [blame] | 2350 | Ops.append(CurrentNAry->op_begin(), CurrentNAry->op_end()); |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2351 | } |
| 2352 | } |
| 2353 | return false; |
| 2354 | } |
| 2355 | |
Dan Gohman | 4d5435d | 2009-05-24 23:45:28 +0000 | [diff] [blame] | 2356 | /// getMulExpr - Get a canonical multiply expression, or something simpler if |
| 2357 | /// possible. |
Dan Gohman | 816fe0a | 2009-10-09 00:10:36 +0000 | [diff] [blame] | 2358 | const SCEV *ScalarEvolution::getMulExpr(SmallVectorImpl<const SCEV *> &Ops, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2359 | SCEV::NoWrapFlags Flags) { |
| 2360 | assert(Flags == maskFlags(Flags, SCEV::FlagNUW | SCEV::FlagNSW) && |
| 2361 | "only nuw or nsw allowed"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2362 | assert(!Ops.empty() && "Cannot get empty mul!"); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2363 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2364 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2365 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2366 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 2367 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2368 | "SCEVMulExpr operand types don't match!"); |
| 2369 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2370 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2371 | Flags = StrengthenNoWrapFlags(this, scMulExpr, Ops, Flags); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2372 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2373 | // Sort by complexity, this groups all similar expression types together. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 2374 | GroupByComplexity(Ops, &LI); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2375 | |
| 2376 | // If there are any constants, fold them together. |
| 2377 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2378 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2379 | |
| 2380 | // C1*(C2+V) -> C1*C2 + C1*V |
| 2381 | if (Ops.size() == 2) |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2382 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[1])) |
| 2383 | // If any of Add's ops are Adds or Muls with a constant, |
| 2384 | // apply this transformation as well. |
| 2385 | if (Add->getNumOperands() == 2) |
| 2386 | if (containsConstantSomewhere(Add)) |
| 2387 | return getAddExpr(getMulExpr(LHSC, Add->getOperand(0)), |
| 2388 | getMulExpr(LHSC, Add->getOperand(1))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2389 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2390 | ++Idx; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2391 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2392 | // We found two constants, fold them together! |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 2393 | ConstantInt *Fold = ConstantInt::get(getContext(), |
| 2394 | LHSC->getValue()->getValue() * |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 2395 | RHSC->getValue()->getValue()); |
| 2396 | Ops[0] = getConstant(Fold); |
| 2397 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 2398 | if (Ops.size() == 1) return Ops[0]; |
| 2399 | LHSC = cast<SCEVConstant>(Ops[0]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2400 | } |
| 2401 | |
| 2402 | // If we are left with a constant one being multiplied, strip it off. |
| 2403 | if (cast<SCEVConstant>(Ops[0])->getValue()->equalsInt(1)) { |
| 2404 | Ops.erase(Ops.begin()); |
| 2405 | --Idx; |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 2406 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isZero()) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2407 | // If we have a multiply of zero, it will always be zero. |
| 2408 | return Ops[0]; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2409 | } else if (Ops[0]->isAllOnesValue()) { |
| 2410 | // If we have a mul by -1 of an add, try distributing the -1 among the |
| 2411 | // add operands. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2412 | if (Ops.size() == 2) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2413 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[1])) { |
| 2414 | SmallVector<const SCEV *, 4> NewOps; |
| 2415 | bool AnyFolded = false; |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2416 | for (SCEVAddRecExpr::op_iterator I = Add->op_begin(), |
| 2417 | E = Add->op_end(); I != E; ++I) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2418 | const SCEV *Mul = getMulExpr(Ops[0], *I); |
| 2419 | if (!isa<SCEVMulExpr>(Mul)) AnyFolded = true; |
| 2420 | NewOps.push_back(Mul); |
| 2421 | } |
| 2422 | if (AnyFolded) |
| 2423 | return getAddExpr(NewOps); |
| 2424 | } |
Andrew Trick | e92dcce | 2011-03-14 17:38:54 +0000 | [diff] [blame] | 2425 | else if (const SCEVAddRecExpr * |
| 2426 | AddRec = dyn_cast<SCEVAddRecExpr>(Ops[1])) { |
| 2427 | // Negation preserves a recurrence's no self-wrap property. |
| 2428 | SmallVector<const SCEV *, 4> Operands; |
| 2429 | for (SCEVAddRecExpr::op_iterator I = AddRec->op_begin(), |
| 2430 | E = AddRec->op_end(); I != E; ++I) { |
| 2431 | Operands.push_back(getMulExpr(Ops[0], *I)); |
| 2432 | } |
| 2433 | return getAddRecExpr(Operands, AddRec->getLoop(), |
| 2434 | AddRec->getNoWrapFlags(SCEV::FlagNW)); |
| 2435 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2436 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2437 | } |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 2438 | |
| 2439 | if (Ops.size() == 1) |
| 2440 | return Ops[0]; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2441 | } |
| 2442 | |
| 2443 | // Skip over the add expression until we get to a multiply. |
| 2444 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scMulExpr) |
| 2445 | ++Idx; |
| 2446 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2447 | // If there are mul operands, inline them all into this expression.
| 2448 | if (Idx < Ops.size()) { |
| 2449 | bool DeletedMul = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2450 | while (const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2451 | // If we have an mul, expand the mul operands onto the end of the operands |
| 2452 | // list. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2453 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 2454 | Ops.append(Mul->op_begin(), Mul->op_end()); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2455 | DeletedMul = true; |
| 2456 | } |
| 2457 | |
| 2458 | // If we deleted at least one mul, we added operands to the end of the list, |
| 2459 | // and they are not necessarily sorted. Recurse to resort and resimplify |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 2460 | // any operands we just acquired. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2461 | if (DeletedMul) |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2462 | return getMulExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2463 | } |
| 2464 | |
| 2465 | // If there are any add recurrences in the operands list, see if any other |
| 2466 | // multiplied values are loop invariant. If so, we can fold them into the
| 2467 | // recurrence. |
| 2468 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddRecExpr) |
| 2469 | ++Idx; |
| 2470 | |
| 2471 | // Scan over all recurrences, trying to fold loop invariants into them. |
| 2472 | for (; Idx < Ops.size() && isa<SCEVAddRecExpr>(Ops[Idx]); ++Idx) { |
| 2473 | // Scan all of the other operands to this mul and add them to the vector if |
| 2474 | // they are loop invariant w.r.t. the recurrence. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2475 | SmallVector<const SCEV *, 8> LIOps; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2476 | const SCEVAddRecExpr *AddRec = cast<SCEVAddRecExpr>(Ops[Idx]); |
Dan Gohman | 0f2de01 | 2010-08-29 14:55:19 +0000 | [diff] [blame] | 2477 | const Loop *AddRecLoop = AddRec->getLoop(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2478 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 2479 | if (isLoopInvariant(Ops[i], AddRecLoop)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2480 | LIOps.push_back(Ops[i]); |
| 2481 | Ops.erase(Ops.begin()+i); |
| 2482 | --i; --e; |
| 2483 | } |
| 2484 | |
| 2485 | // If we found some loop invariants, fold them into the recurrence. |
| 2486 | if (!LIOps.empty()) { |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 2487 | // NLI * LI * {Start,+,Step} --> NLI * {LI*Start,+,LI*Step} |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2488 | SmallVector<const SCEV *, 4> NewOps; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2489 | NewOps.reserve(AddRec->getNumOperands()); |
Dan Gohman | 8f5954f | 2010-06-17 23:34:09 +0000 | [diff] [blame] | 2490 | const SCEV *Scale = getMulExpr(LIOps); |
| 2491 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) |
| 2492 | NewOps.push_back(getMulExpr(Scale, AddRec->getOperand(i))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2493 | |
Dan Gohman | 1620613 | 2010-06-30 07:16:37 +0000 | [diff] [blame] | 2494 | // Build the new addrec. Propagate the NUW and NSW flags if both the |
| 2495 | // outer mul and the inner addrec are guaranteed to have no overflow. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2496 | // |
| 2497 | // No-self-wrap (NW) cannot be guaranteed after changing the step size,
Chris Lattner | 0ab5e2c | 2011-04-15 05:18:47 +0000 | [diff] [blame] | 2498 | // but it will be re-inferred if either NUW or NSW is true.
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2499 | Flags = AddRec->getNoWrapFlags(clearFlags(Flags, SCEV::FlagNW)); |
| 2500 | const SCEV *NewRec = getAddRecExpr(NewOps, AddRecLoop, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2501 | |
| 2502 | // If all of the other operands were loop invariant, we are done. |
| 2503 | if (Ops.size() == 1) return NewRec; |
| 2504 | |
Nick Lewycky | db66b82 | 2011-09-06 05:08:09 +0000 | [diff] [blame] | 2505 | // Otherwise, multiply the folded AddRec by the non-invariant parts. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2506 | for (unsigned i = 0;; ++i) |
| 2507 | if (Ops[i] == AddRec) { |
| 2508 | Ops[i] = NewRec; |
| 2509 | break; |
| 2510 | } |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 2511 | return getMulExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2512 | } |
| 2513 | |
| 2514 | // Okay, if there weren't any loop invariants to be folded, check to see if |
| 2515 | // there are multiple AddRecs with the same loop induction variable being
| 2516 | // multiplied together. If so, we can fold them. |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2517 | |
| 2518 | // {A1,+,A2,+,...,+,An}<L> * {B1,+,B2,+,...,+,Bn}<L> |
| 2519 | // = {x=1 in [ sum y=x..2x [ sum z=max(y-x, y-n)..min(x,n) [ |
| 2520 | // choose(x, 2x-y)*choose(2x-y, x-z)*A_{y-z}*B_z
| 2521 | // ]]],+,...up to x=2n}. |
| 2522 | // Note that the arguments to choose() are always integers with values |
| 2523 | // known at compile time, never SCEV objects. |
| 2524 | // |
| 2525 | // The implementation avoids pointless extra computations when the two |
| 2526 | // addrecs are of different length (mathematically, it's equivalent to
| 2527 | // an infinite stream of zeros on the right). |
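  // As an illustration for the affine case (two operands each): the product
  // at iteration i is (a + b*i) * (c + d*i), which as an add recurrence is
  //   {a,+,b}<L> * {c,+,d}<L> = {a*c,+,a*d + b*c + b*d,+,2*b*d}<L>
  // and is exactly what the nested loops below compute term by term.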
| 2528 | bool OpsModified = false; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2529 | for (unsigned OtherIdx = Idx+1; |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2530 | OtherIdx != Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
Nick Lewycky | e0aa54b | 2011-09-06 21:42:18 +0000 | [diff] [blame] | 2531 | ++OtherIdx) { |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2532 | const SCEVAddRecExpr *OtherAddRec = |
| 2533 | dyn_cast<SCEVAddRecExpr>(Ops[OtherIdx]); |
| 2534 | if (!OtherAddRec || OtherAddRec->getLoop() != AddRecLoop) |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2535 | continue; |
| 2536 | |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2537 | bool Overflow = false; |
| 2538 | Type *Ty = AddRec->getType(); |
| 2539 | bool LargerThan64Bits = getTypeSizeInBits(Ty) > 64; |
| 2540 | SmallVector<const SCEV*, 7> AddRecOps; |
| 2541 | for (int x = 0, xe = AddRec->getNumOperands() + |
| 2542 | OtherAddRec->getNumOperands() - 1; x != xe && !Overflow; ++x) { |
| 2543 | const SCEV *Term = getConstant(Ty, 0); |
| 2544 | for (int y = x, ye = 2*x+1; y != ye && !Overflow; ++y) { |
| 2545 | uint64_t Coeff1 = Choose(x, 2*x - y, Overflow); |
| 2546 | for (int z = std::max(y-x, y-(int)AddRec->getNumOperands()+1), |
| 2547 | ze = std::min(x+1, (int)OtherAddRec->getNumOperands()); |
| 2548 | z < ze && !Overflow; ++z) { |
| 2549 | uint64_t Coeff2 = Choose(2*x - y, x-z, Overflow); |
| 2550 | uint64_t Coeff; |
| 2551 | if (LargerThan64Bits) |
| 2552 | Coeff = umul_ov(Coeff1, Coeff2, Overflow); |
| 2553 | else |
| 2554 | Coeff = Coeff1*Coeff2; |
| 2555 | const SCEV *CoeffTerm = getConstant(Ty, Coeff); |
| 2556 | const SCEV *Term1 = AddRec->getOperand(y-z); |
| 2557 | const SCEV *Term2 = OtherAddRec->getOperand(z); |
| 2558 | Term = getAddExpr(Term, getMulExpr(CoeffTerm, Term1,Term2)); |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2559 | } |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2560 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2561 | AddRecOps.push_back(Term); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2562 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2563 | if (!Overflow) { |
| 2564 | const SCEV *NewAddRec = getAddRecExpr(AddRecOps, AddRec->getLoop(), |
| 2565 | SCEV::FlagAnyWrap); |
| 2566 | if (Ops.size() == 2) return NewAddRec; |
| 2567 | Ops[Idx] = NewAddRec; |
| 2568 | Ops.erase(Ops.begin() + OtherIdx); --OtherIdx; |
| 2569 | OpsModified = true; |
| 2570 | AddRec = dyn_cast<SCEVAddRecExpr>(NewAddRec); |
| 2571 | if (!AddRec) |
| 2572 | break; |
| 2573 | } |
Nick Lewycky | e0aa54b | 2011-09-06 21:42:18 +0000 | [diff] [blame] | 2574 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2575 | if (OpsModified) |
| 2576 | return getMulExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2577 | |
| 2578 | // Otherwise couldn't fold anything into this recurrence. Move onto the |
| 2579 | // next one. |
| 2580 | } |
| 2581 | |
| 2582 | // Okay, it looks like we really DO need a mul expr. Check to see if we
| 2583 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2584 | FoldingSetNodeID ID; |
| 2585 | ID.AddInteger(scMulExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2586 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 2587 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2588 | void *IP = nullptr; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2589 | SCEVMulExpr *S = |
| 2590 | static_cast<SCEVMulExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 2591 | if (!S) { |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2592 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 2593 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 2594 | S = new (SCEVAllocator) SCEVMulExpr(ID.Intern(SCEVAllocator), |
| 2595 | O, Ops.size()); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2596 | UniqueSCEVs.InsertNode(S, IP); |
| 2597 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2598 | S->setNoWrapFlags(Flags); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2599 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2600 | } |
| 2601 | |
Andreas Bolka | 7a5c8db | 2009-08-07 22:55:26 +0000 | [diff] [blame] | 2602 | /// getUDivExpr - Get a canonical unsigned division expression, or something |
| 2603 | /// simpler if possible. |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 2604 | const SCEV *ScalarEvolution::getUDivExpr(const SCEV *LHS, |
| 2605 | const SCEV *RHS) { |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2606 | assert(getEffectiveSCEVType(LHS->getType()) == |
| 2607 | getEffectiveSCEVType(RHS->getType()) && |
| 2608 | "SCEVUDivExpr operand types don't match!"); |
| 2609 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2610 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2611 | if (RHSC->getValue()->equalsInt(1)) |
Dan Gohman | 8a8ad7d | 2009-08-20 16:42:55 +0000 | [diff] [blame] | 2612 | return LHS; // X udiv 1 --> X
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2613 | // If the denominator is zero, the result of the udiv is undefined. Don't |
| 2614 | // try to analyze it, because the resolution chosen here may differ from |
| 2615 | // the resolution chosen in other parts of the compiler. |
| 2616 | if (!RHSC->getValue()->isZero()) { |
| 2617 | // Determine if the division can be folded into the operands of
| 2618 | // the LHS expression.
| 2619 | // TODO: Generalize this to non-constants by using known-bits information. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2620 | Type *Ty = LHS->getType(); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2621 | unsigned LZ = RHSC->getValue()->getValue().countLeadingZeros(); |
Dan Gohman | db764c6 | 2010-08-04 19:52:50 +0000 | [diff] [blame] | 2622 | unsigned MaxShiftAmt = getTypeSizeInBits(Ty) - LZ - 1; |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2623 | // For non-power-of-two values, effectively round the value up to the |
| 2624 | // nearest power of two. |
| 2625 | if (!RHSC->getValue()->getValue().isPowerOf2()) |
| 2626 | ++MaxShiftAmt; |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2627 | IntegerType *ExtTy = |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2628 | IntegerType::get(getContext(), getTypeSizeInBits(Ty) + MaxShiftAmt); |
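      // For example, dividing an i32 value by 6 gives LZ = 29 and
      // MaxShiftAmt = 32 - 29 - 1 = 2, bumped to 3 because 6 is not a power
      // of two, so ExtTy is i35 -- wide enough that the zero-extend based
      // checks below do not lose bits.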
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2629 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(LHS)) |
| 2630 | if (const SCEVConstant *Step = |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 2631 | dyn_cast<SCEVConstant>(AR->getStepRecurrence(*this))) { |
| 2632 | // {X,+,N}/C --> {X/C,+,N/C} if safe and N/C can be folded. |
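        // For example, {0,+,8}<L> /u 4 --> {0,+,2}<L>, provided the
        // zero-extend check below shows the recurrence cannot wrap.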
| 2633 | const APInt &StepInt = Step->getValue()->getValue(); |
| 2634 | const APInt &DivInt = RHSC->getValue()->getValue(); |
| 2635 | if (!StepInt.urem(DivInt) && |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2636 | getZeroExtendExpr(AR, ExtTy) == |
| 2637 | getAddRecExpr(getZeroExtendExpr(AR->getStart(), ExtTy), |
| 2638 | getZeroExtendExpr(Step, ExtTy), |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2639 | AR->getLoop(), SCEV::FlagAnyWrap)) { |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2640 | SmallVector<const SCEV *, 4> Operands; |
| 2641 | for (unsigned i = 0, e = AR->getNumOperands(); i != e; ++i) |
| 2642 | Operands.push_back(getUDivExpr(AR->getOperand(i), RHS)); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2643 | return getAddRecExpr(Operands, AR->getLoop(), |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2644 | SCEV::FlagNW); |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 2645 | } |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 2646 | // Get a canonical UDivExpr for a recurrence.
| 2647 | // {X,+,N}/C => {Y,+,N}/C where Y=X-(X%N). Safe when C%N=0.
| 2648 | // We can currently only fold X%N if X is constant. |
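            // For example, {5,+,2}<L> /u 4 --> {4,+,2}<L> /u 4; both produce
            // the sequence 1, 1, 2, 2, ... but the new start is aligned to
            // the step.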
| 2649 | const SCEVConstant *StartC = dyn_cast<SCEVConstant>(AR->getStart()); |
| 2650 | if (StartC && !DivInt.urem(StepInt) && |
| 2651 | getZeroExtendExpr(AR, ExtTy) == |
| 2652 | getAddRecExpr(getZeroExtendExpr(AR->getStart(), ExtTy), |
| 2653 | getZeroExtendExpr(Step, ExtTy), |
| 2654 | AR->getLoop(), SCEV::FlagAnyWrap)) { |
| 2655 | const APInt &StartInt = StartC->getValue()->getValue(); |
| 2656 | const APInt &StartRem = StartInt.urem(StepInt); |
| 2657 | if (StartRem != 0) |
| 2658 | LHS = getAddRecExpr(getConstant(StartInt - StartRem), Step, |
| 2659 | AR->getLoop(), SCEV::FlagNW); |
| 2660 | } |
| 2661 | } |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2662 | // (A*B)/C --> A*(B/C) if safe and B/C can be folded. |
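      // For example, (x * 8) /u 4 --> x * 2, provided the multiply is shown
      // not to overflow and the constant operand divides exactly.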
| 2663 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(LHS)) { |
| 2664 | SmallVector<const SCEV *, 4> Operands; |
| 2665 | for (unsigned i = 0, e = M->getNumOperands(); i != e; ++i) |
| 2666 | Operands.push_back(getZeroExtendExpr(M->getOperand(i), ExtTy)); |
| 2667 | if (getZeroExtendExpr(M, ExtTy) == getMulExpr(Operands)) |
| 2668 | // Find an operand that's safely divisible. |
| 2669 | for (unsigned i = 0, e = M->getNumOperands(); i != e; ++i) { |
| 2670 | const SCEV *Op = M->getOperand(i); |
| 2671 | const SCEV *Div = getUDivExpr(Op, RHSC); |
| 2672 | if (!isa<SCEVUDivExpr>(Div) && getMulExpr(Div, RHSC) == Op) { |
| 2673 | Operands = SmallVector<const SCEV *, 4>(M->op_begin(), |
| 2674 | M->op_end()); |
| 2675 | Operands[i] = Div; |
| 2676 | return getMulExpr(Operands); |
| 2677 | } |
| 2678 | } |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 2679 | } |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2680 | // (A+B)/C --> (A/C + B/C) if safe and A/C and B/C can be folded. |
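      // For example, (x*4 + 8) /u 4 --> x + 2 when the add cannot overflow
      // and each operand divides exactly by 4.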
Andrew Trick | 7d1eea8 | 2011-04-27 18:17:36 +0000 | [diff] [blame] | 2681 | if (const SCEVAddExpr *A = dyn_cast<SCEVAddExpr>(LHS)) { |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2682 | SmallVector<const SCEV *, 4> Operands; |
| 2683 | for (unsigned i = 0, e = A->getNumOperands(); i != e; ++i) |
| 2684 | Operands.push_back(getZeroExtendExpr(A->getOperand(i), ExtTy)); |
| 2685 | if (getZeroExtendExpr(A, ExtTy) == getAddExpr(Operands)) { |
| 2686 | Operands.clear(); |
| 2687 | for (unsigned i = 0, e = A->getNumOperands(); i != e; ++i) { |
| 2688 | const SCEV *Op = getUDivExpr(A->getOperand(i), RHS); |
| 2689 | if (isa<SCEVUDivExpr>(Op) || |
| 2690 | getMulExpr(Op, RHS) != A->getOperand(i)) |
| 2691 | break; |
| 2692 | Operands.push_back(Op); |
| 2693 | } |
| 2694 | if (Operands.size() == A->getNumOperands()) |
| 2695 | return getAddExpr(Operands); |
| 2696 | } |
| 2697 | } |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 2698 | |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 2699 | // Fold if both operands are constant. |
| 2700 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(LHS)) { |
| 2701 | Constant *LHSCV = LHSC->getValue(); |
| 2702 | Constant *RHSCV = RHSC->getValue(); |
| 2703 | return getConstant(cast<ConstantInt>(ConstantExpr::getUDiv(LHSCV, |
| 2704 | RHSCV))); |
| 2705 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2706 | } |
| 2707 | } |
| 2708 | |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2709 | FoldingSetNodeID ID; |
| 2710 | ID.AddInteger(scUDivExpr); |
| 2711 | ID.AddPointer(LHS); |
| 2712 | ID.AddPointer(RHS); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2713 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2714 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 2715 | SCEV *S = new (SCEVAllocator) SCEVUDivExpr(ID.Intern(SCEVAllocator), |
| 2716 | LHS, RHS); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2717 | UniqueSCEVs.InsertNode(S, IP); |
| 2718 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2719 | } |
| 2720 | |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 2721 | static const APInt gcd(const SCEVConstant *C1, const SCEVConstant *C2) { |
| 2722 | APInt A = C1->getValue()->getValue().abs(); |
| 2723 | APInt B = C2->getValue()->getValue().abs(); |
| 2724 | uint32_t ABW = A.getBitWidth(); |
| 2725 | uint32_t BBW = B.getBitWidth(); |
| 2726 | |
| 2727 | if (ABW > BBW) |
| 2728 | B = B.zext(ABW); |
| 2729 | else if (ABW < BBW) |
| 2730 | A = A.zext(BBW); |
| 2731 | |
| 2732 | return APIntOps::GreatestCommonDivisor(A, B); |
| 2733 | } |
| 2734 | |
| 2735 | /// getUDivExactExpr - Get a canonical unsigned division expression, or |
| 2736 | /// something simpler if possible. There is no representation for an exact udiv |
| 2737 | /// in SCEV IR, but we can attempt to remove factors from the LHS and RHS. |
| 2738 | /// We can't do this when it's not exact because the udiv may be clearing bits. |
| 2739 | const SCEV *ScalarEvolution::getUDivExactExpr(const SCEV *LHS, |
| 2740 | const SCEV *RHS) { |
| 2741 | // TODO: we could try to find factors in all sorts of things, but for now we |
| 2742 | // just deal with u/exact (multiply, constant). See SCEVDivision towards the |
| 2743 | // end of this file for inspiration. |
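  // For example, an exact (3 * x) /u 3 can be simplified to x, a rewrite
  // that is not generally valid for a plain udiv of the same operands.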
| 2744 | |
| 2745 | const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(LHS); |
| 2746 | if (!Mul) |
| 2747 | return getUDivExpr(LHS, RHS); |
| 2748 | |
| 2749 | if (const SCEVConstant *RHSCst = dyn_cast<SCEVConstant>(RHS)) { |
| 2750 | // If the mulexpr multiplies by a constant, then that constant must be the |
| 2751 | // first element of the mulexpr. |
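      // For example, an exact (4 * x * y) /u 4 simplifies directly to x * y.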
| 2752 | if (const SCEVConstant *LHSCst = |
| 2753 | dyn_cast<SCEVConstant>(Mul->getOperand(0))) { |
| 2754 | if (LHSCst == RHSCst) { |
| 2755 | SmallVector<const SCEV *, 2> Operands; |
| 2756 | Operands.append(Mul->op_begin() + 1, Mul->op_end()); |
| 2757 | return getMulExpr(Operands); |
| 2758 | } |
| 2759 | |
| 2760 | // We can't just assume that LHSCst divides RHSCst cleanly, it could be |
| 2761 | // that there's a factor provided by one of the other terms. We need to |
| 2762 | // check. |
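        // For example, for an exact (4 * x) /u 6 the gcd of 4 and 6 is 2, so
        // the expression becomes an exact (2 * x) /u 3 before recursing.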
| 2763 | APInt Factor = gcd(LHSCst, RHSCst); |
| 2764 | if (!Factor.isIntN(1)) { |
| 2765 | LHSCst = cast<SCEVConstant>( |
| 2766 | getConstant(LHSCst->getValue()->getValue().udiv(Factor))); |
| 2767 | RHSCst = cast<SCEVConstant>( |
| 2768 | getConstant(RHSCst->getValue()->getValue().udiv(Factor))); |
| 2769 | SmallVector<const SCEV *, 2> Operands; |
| 2770 | Operands.push_back(LHSCst); |
| 2771 | Operands.append(Mul->op_begin() + 1, Mul->op_end()); |
| 2772 | LHS = getMulExpr(Operands); |
| 2773 | RHS = RHSCst; |
Nick Lewycky | 629199c | 2014-01-27 10:47:44 +0000 | [diff] [blame] | 2774 | Mul = dyn_cast<SCEVMulExpr>(LHS); |
| 2775 | if (!Mul) |
| 2776 | return getUDivExactExpr(LHS, RHS); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 2777 | } |
| 2778 | } |
| 2779 | } |
| 2780 | |
| 2781 | for (int i = 0, e = Mul->getNumOperands(); i != e; ++i) { |
| 2782 | if (Mul->getOperand(i) == RHS) { |
| 2783 | SmallVector<const SCEV *, 2> Operands; |
| 2784 | Operands.append(Mul->op_begin(), Mul->op_begin() + i); |
| 2785 | Operands.append(Mul->op_begin() + i + 1, Mul->op_end()); |
| 2786 | return getMulExpr(Operands); |
| 2787 | } |
| 2788 | } |
| 2789 | |
| 2790 | return getUDivExpr(LHS, RHS); |
| 2791 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2792 | |
Dan Gohman | 4d5435d | 2009-05-24 23:45:28 +0000 | [diff] [blame] | 2793 | /// getAddRecExpr - Get an add recurrence expression for the specified loop. |
| 2794 | /// Simplify the expression as much as possible. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2795 | const SCEV *ScalarEvolution::getAddRecExpr(const SCEV *Start, const SCEV *Step, |
| 2796 | const Loop *L, |
| 2797 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2798 | SmallVector<const SCEV *, 4> Operands; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2799 | Operands.push_back(Start); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2800 | if (const SCEVAddRecExpr *StepChrec = dyn_cast<SCEVAddRecExpr>(Step)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2801 | if (StepChrec->getLoop() == L) { |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 2802 | Operands.append(StepChrec->op_begin(), StepChrec->op_end()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2803 | return getAddRecExpr(Operands, L, maskFlags(Flags, SCEV::FlagNW)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2804 | } |
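  // For example, getAddRecExpr(S, {Y,+,Z}<L>, L) yields the flattened chain
  // {S,+,Y,+,Z}<L>, the canonical chain-of-recurrences form.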
| 2805 | |
| 2806 | Operands.push_back(Step); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2807 | return getAddRecExpr(Operands, L, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2808 | } |
| 2809 | |
Dan Gohman | 4d5435d | 2009-05-24 23:45:28 +0000 | [diff] [blame] | 2810 | /// getAddRecExpr - Get an add recurrence expression for the specified loop. |
| 2811 | /// Simplify the expression as much as possible. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2812 | const SCEV * |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2813 | ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2814 | const Loop *L, SCEV::NoWrapFlags Flags) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2815 | if (Operands.size() == 1) return Operands[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2816 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2817 | Type *ETy = getEffectiveSCEVType(Operands[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2818 | for (unsigned i = 1, e = Operands.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 2819 | assert(getEffectiveSCEVType(Operands[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2820 | "SCEVAddRecExpr operand types don't match!"); |
Dan Gohman | d3a32ae | 2010-11-17 20:48:38 +0000 | [diff] [blame] | 2821 | for (unsigned i = 0, e = Operands.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 2822 | assert(isLoopInvariant(Operands[i], L) && |
Dan Gohman | d3a32ae | 2010-11-17 20:48:38 +0000 | [diff] [blame] | 2823 | "SCEVAddRecExpr operand is not loop-invariant!"); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2824 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2825 | |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 2826 | if (Operands.back()->isZero()) { |
| 2827 | Operands.pop_back(); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2828 | return getAddRecExpr(Operands, L, SCEV::FlagAnyWrap); // {X,+,0} --> X |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 2829 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2830 | |
Dan Gohman | cf9c64e | 2010-02-19 18:49:22 +0000 | [diff] [blame] | 2831 | // It's tempting to call getMaxBackedgeTakenCount here and
| 2832 | // use that information to infer NUW and NSW flags. However, computing a |
| 2833 | // BE count requires calling getAddRecExpr, so we may not yet have a |
| 2834 | // meaningful BE count at this point (and if we don't, we'd be stuck |
| 2835 | // with a SCEVCouldNotCompute as the cached BE count). |
| 2836 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2837 | Flags = StrengthenNoWrapFlags(this, scAddRecExpr, Operands, Flags); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2838 | |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 2839 | // Canonicalize nested AddRecs by nesting them in order of loop depth.
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2840 | if (const SCEVAddRecExpr *NestedAR = dyn_cast<SCEVAddRecExpr>(Operands[0])) { |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 2841 | const Loop *NestedLoop = NestedAR->getLoop(); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 2842 | if (L->contains(NestedLoop) |
| 2843 | ? (L->getLoopDepth() < NestedLoop->getLoopDepth()) |
| 2844 | : (!NestedLoop->contains(L) && |
| 2845 | DT.dominates(L->getHeader(), NestedLoop->getHeader()))) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2846 | SmallVector<const SCEV *, 4> NestedOperands(NestedAR->op_begin(), |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 2847 | NestedAR->op_end()); |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 2848 | Operands[0] = NestedAR->getStart(); |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2849 | // AddRecs require their operands be loop-invariant with respect to their |
| 2850 | // loops. Don't perform this transformation if it would break this |
| 2851 | // requirement. |
| 2852 | bool AllInvariant = true; |
| 2853 | for (unsigned i = 0, e = Operands.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 2854 | if (!isLoopInvariant(Operands[i], L)) { |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2855 | AllInvariant = false; |
| 2856 | break; |
| 2857 | } |
| 2858 | if (AllInvariant) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2859 | // Create a recurrence for the outer loop with the same step size. |
| 2860 | // |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2861 | // The outer recurrence keeps its NW flag but only keeps NUW/NSW if the |
| 2862 | // inner recurrence has the same property. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2863 | SCEV::NoWrapFlags OuterFlags = |
| 2864 | maskFlags(Flags, SCEV::FlagNW | NestedAR->getNoWrapFlags()); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2865 | |
| 2866 | NestedOperands[0] = getAddRecExpr(Operands, L, OuterFlags); |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2867 | AllInvariant = true; |
| 2868 | for (unsigned i = 0, e = NestedOperands.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 2869 | if (!isLoopInvariant(NestedOperands[i], NestedLoop)) { |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2870 | AllInvariant = false; |
| 2871 | break; |
| 2872 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2873 | if (AllInvariant) { |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2874 | // Ok, both add recurrences are valid after the transformation. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2875 | // |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2876 | // The inner recurrence keeps its NW flag but only keeps NUW/NSW if |
| 2877 | // the outer recurrence has the same property. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2878 | SCEV::NoWrapFlags InnerFlags = |
| 2879 | maskFlags(NestedAR->getNoWrapFlags(), SCEV::FlagNW | Flags); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2880 | return getAddRecExpr(NestedOperands, NestedLoop, InnerFlags); |
| 2881 | } |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 2882 | } |
| 2883 | // Reset Operands to its original state. |
| 2884 | Operands[0] = NestedAR; |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 2885 | } |
| 2886 | } |
| 2887 | |
Dan Gohman | 8d67d2f | 2010-01-19 22:27:22 +0000 | [diff] [blame] | 2888 | // Okay, it looks like we really DO need an addrec expr. Check to see if we |
| 2889 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2890 | FoldingSetNodeID ID; |
| 2891 | ID.AddInteger(scAddRecExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2892 | for (unsigned i = 0, e = Operands.size(); i != e; ++i) |
| 2893 | ID.AddPointer(Operands[i]); |
| 2894 | ID.AddPointer(L); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2895 | void *IP = nullptr; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2896 | SCEVAddRecExpr *S = |
| 2897 | static_cast<SCEVAddRecExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 2898 | if (!S) { |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2899 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Operands.size()); |
| 2900 | std::uninitialized_copy(Operands.begin(), Operands.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 2901 | S = new (SCEVAllocator) SCEVAddRecExpr(ID.Intern(SCEVAllocator), |
| 2902 | O, Operands.size(), L); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2903 | UniqueSCEVs.InsertNode(S, IP); |
| 2904 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2905 | S->setNoWrapFlags(Flags); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2906 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2907 | } |
| 2908 | |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 2909 | const SCEV * |
| 2910 | ScalarEvolution::getGEPExpr(Type *PointeeType, const SCEV *BaseExpr, |
| 2911 | const SmallVectorImpl<const SCEV *> &IndexExprs, |
| 2912 | bool InBounds) { |
| 2913 | // getSCEV(Base)->getType() has the same address space as Base->getType() |
| 2914 | // because SCEV::getType() preserves the address space. |
| 2915 | Type *IntPtrTy = getEffectiveSCEVType(BaseExpr->getType()); |
| 2916 | // FIXME(PR23527): Don't blindly transfer the inbounds flag from the GEP |
| 2917 | // instruction to its SCEV, because the Instruction may be guarded by control |
| 2918 | // flow and the no-overflow bits may not be valid for the expression in any |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 2919 | // context. This can be fixed similarly to how these flags are handled for |
| 2920 | // adds. |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 2921 | SCEV::NoWrapFlags Wrap = InBounds ? SCEV::FlagNSW : SCEV::FlagAnyWrap; |
| 2922 | |
| 2923 | const SCEV *TotalOffset = getConstant(IntPtrTy, 0); |
| 2924 | // The address space is unimportant; the only thing we do with CurTy is
| 2925 | // extract its element type.
| 2926 | Type *CurTy = PointerType::getUnqual(PointeeType); |
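  // For example, for an inbounds GEP computing &P[i].F, where the pointee
  // type is a struct S, the loop below produces
  //   BaseExpr + i*sizeof(S) + offsetof(S, F)
  // with the multiply and the final add carrying the nsw flag.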
| 2927 | for (const SCEV *IndexExpr : IndexExprs) { |
| 2928 | // Compute the (potentially symbolic) offset in bytes for this index. |
| 2929 | if (StructType *STy = dyn_cast<StructType>(CurTy)) { |
| 2930 | // For a struct, add the member offset. |
| 2931 | ConstantInt *Index = cast<SCEVConstant>(IndexExpr)->getValue(); |
| 2932 | unsigned FieldNo = Index->getZExtValue(); |
| 2933 | const SCEV *FieldOffset = getOffsetOfExpr(IntPtrTy, STy, FieldNo); |
| 2934 | |
| 2935 | // Add the field offset to the running total offset. |
| 2936 | TotalOffset = getAddExpr(TotalOffset, FieldOffset); |
| 2937 | |
| 2938 | // Update CurTy to the type of the field at Index. |
| 2939 | CurTy = STy->getTypeAtIndex(Index); |
| 2940 | } else { |
| 2941 | // Update CurTy to its element type. |
| 2942 | CurTy = cast<SequentialType>(CurTy)->getElementType(); |
| 2943 | // For an array, add the element offset, explicitly scaled. |
| 2944 | const SCEV *ElementSize = getSizeOfExpr(IntPtrTy, CurTy); |
| 2945 | // Getelementptr indices are signed. |
| 2946 | IndexExpr = getTruncateOrSignExtend(IndexExpr, IntPtrTy); |
| 2947 | |
| 2948 | // Multiply the index by the element size to compute the element offset. |
| 2949 | const SCEV *LocalOffset = getMulExpr(IndexExpr, ElementSize, Wrap); |
| 2950 | |
| 2951 | // Add the element offset to the running total offset. |
| 2952 | TotalOffset = getAddExpr(TotalOffset, LocalOffset); |
| 2953 | } |
| 2954 | } |
| 2955 | |
| 2956 | // Add the total offset from all the GEP indices to the base. |
| 2957 | return getAddExpr(BaseExpr, TotalOffset, Wrap); |
| 2958 | } |
| 2959 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 2960 | const SCEV *ScalarEvolution::getSMaxExpr(const SCEV *LHS, |
| 2961 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2962 | SmallVector<const SCEV *, 2> Ops; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2963 | Ops.push_back(LHS); |
| 2964 | Ops.push_back(RHS); |
| 2965 | return getSMaxExpr(Ops); |
| 2966 | } |
| 2967 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2968 | const SCEV * |
| 2969 | ScalarEvolution::getSMaxExpr(SmallVectorImpl<const SCEV *> &Ops) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2970 | assert(!Ops.empty() && "Cannot get empty smax!"); |
| 2971 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2972 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2973 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2974 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 2975 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2976 | "SCEVSMaxExpr operand types don't match!"); |
| 2977 | #endif |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2978 | |
| 2979 | // Sort by complexity, this groups all similar expression types together. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 2980 | GroupByComplexity(Ops, &LI); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2981 | |
| 2982 | // If there are any constants, fold them together. |
| 2983 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2984 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2985 | ++Idx; |
| 2986 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2987 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2988 | // We found two constants, fold them together! |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 2989 | ConstantInt *Fold = ConstantInt::get(getContext(), |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2990 | APIntOps::smax(LHSC->getValue()->getValue(), |
| 2991 | RHSC->getValue()->getValue())); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 2992 | Ops[0] = getConstant(Fold); |
| 2993 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 2994 | if (Ops.size() == 1) return Ops[0]; |
| 2995 | LHSC = cast<SCEVConstant>(Ops[0]); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2996 | } |
| 2997 | |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 2998 | // If we are left with a constant minimum-int, strip it off. |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 2999 | if (cast<SCEVConstant>(Ops[0])->getValue()->isMinValue(true)) { |
| 3000 | Ops.erase(Ops.begin()); |
| 3001 | --Idx; |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3002 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isMaxValue(true)) { |
| 3003 | // If we have an smax with a constant maximum-int, it will always be |
| 3004 | // maximum-int. |
| 3005 | return Ops[0]; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3006 | } |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3007 | |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 3008 | if (Ops.size() == 1) return Ops[0]; |
| 3009 | } |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3010 | |
| 3011 | // Find the first SMax |
| 3012 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scSMaxExpr) |
| 3013 | ++Idx; |
| 3014 | |
| 3015 | // Check to see if one of the operands is an SMax. If so, expand its operands |
| 3016 | // onto our operand list, and recurse to simplify. |
| 3017 | if (Idx < Ops.size()) { |
| 3018 | bool DeletedSMax = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3019 | while (const SCEVSMaxExpr *SMax = dyn_cast<SCEVSMaxExpr>(Ops[Idx])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3020 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 3021 | Ops.append(SMax->op_begin(), SMax->op_end()); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3022 | DeletedSMax = true; |
| 3023 | } |
| 3024 | |
| 3025 | if (DeletedSMax) |
| 3026 | return getSMaxExpr(Ops); |
| 3027 | } |
| 3028 | |
| 3029 | // Okay, check to see if the same value occurs in the operand list twice. If |
| 3030 | // so, delete one. Since we sorted the list, these values are required to |
| 3031 | // be adjacent. |
| 3032 | for (unsigned i = 0, e = Ops.size()-1; i != e; ++i) |
Dan Gohman | 7ef0dc2 | 2010-04-13 16:51:03 +0000 | [diff] [blame] | 3033 | // X smax Y smax Y --> X smax Y |
| 3034 | // X smax Y --> X, if X is always greater than or equal to Y
| 3035 | if (Ops[i] == Ops[i+1] || |
| 3036 | isKnownPredicate(ICmpInst::ICMP_SGE, Ops[i], Ops[i+1])) { |
| 3037 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+2); |
| 3038 | --i; --e; |
| 3039 | } else if (isKnownPredicate(ICmpInst::ICMP_SLE, Ops[i], Ops[i+1])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3040 | Ops.erase(Ops.begin()+i, Ops.begin()+i+1); |
| 3041 | --i; --e; |
| 3042 | } |
| 3043 | |
| 3044 | if (Ops.size() == 1) return Ops[0]; |
| 3045 | |
| 3046 | assert(!Ops.empty() && "Reduced smax down to nothing!"); |
| 3047 | |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3048 | // Okay, it looks like we really DO need an smax expr. Check to see if we |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3049 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3050 | FoldingSetNodeID ID; |
| 3051 | ID.AddInteger(scSMaxExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3052 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 3053 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3054 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3055 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 3056 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 3057 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3058 | SCEV *S = new (SCEVAllocator) SCEVSMaxExpr(ID.Intern(SCEVAllocator), |
| 3059 | O, Ops.size()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3060 | UniqueSCEVs.InsertNode(S, IP); |
| 3061 | return S; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3062 | } |
| 3063 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3064 | const SCEV *ScalarEvolution::getUMaxExpr(const SCEV *LHS, |
| 3065 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3066 | SmallVector<const SCEV *, 2> Ops; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3067 | Ops.push_back(LHS); |
| 3068 | Ops.push_back(RHS); |
| 3069 | return getUMaxExpr(Ops); |
| 3070 | } |
| 3071 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3072 | const SCEV * |
| 3073 | ScalarEvolution::getUMaxExpr(SmallVectorImpl<const SCEV *> &Ops) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3074 | assert(!Ops.empty() && "Cannot get empty umax!"); |
| 3075 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3076 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3077 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3078 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 3079 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3080 | "SCEVUMaxExpr operand types don't match!"); |
| 3081 | #endif |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3082 | |
| 3083 | // Sort by complexity, this groups all similar expression types together. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3084 | GroupByComplexity(Ops, &LI); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3085 | |
| 3086 | // If there are any constants, fold them together. |
| 3087 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3088 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3089 | ++Idx; |
| 3090 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3091 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3092 | // We found two constants, fold them together! |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 3093 | ConstantInt *Fold = ConstantInt::get(getContext(), |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3094 | APIntOps::umax(LHSC->getValue()->getValue(), |
| 3095 | RHSC->getValue()->getValue())); |
| 3096 | Ops[0] = getConstant(Fold); |
| 3097 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 3098 | if (Ops.size() == 1) return Ops[0]; |
| 3099 | LHSC = cast<SCEVConstant>(Ops[0]); |
| 3100 | } |
| 3101 | |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3102 | // If we are left with a constant minimum-int, strip it off. |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3103 | if (cast<SCEVConstant>(Ops[0])->getValue()->isMinValue(false)) { |
| 3104 | Ops.erase(Ops.begin()); |
| 3105 | --Idx; |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3106 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isMaxValue(false)) { |
| 3107 | // If we have an umax with a constant maximum-int, it will always be |
| 3108 | // maximum-int. |
| 3109 | return Ops[0]; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3110 | } |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3111 | |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 3112 | if (Ops.size() == 1) return Ops[0]; |
| 3113 | } |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3114 | |
| 3115 | // Find the first UMax |
| 3116 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scUMaxExpr) |
| 3117 | ++Idx; |
| 3118 | |
| 3119 | // Check to see if one of the operands is a UMax. If so, expand its operands |
| 3120 | // onto our operand list, and recurse to simplify. |
| 3121 | if (Idx < Ops.size()) { |
| 3122 | bool DeletedUMax = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3123 | while (const SCEVUMaxExpr *UMax = dyn_cast<SCEVUMaxExpr>(Ops[Idx])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3124 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 3125 | Ops.append(UMax->op_begin(), UMax->op_end()); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3126 | DeletedUMax = true; |
| 3127 | } |
| 3128 | |
| 3129 | if (DeletedUMax) |
| 3130 | return getUMaxExpr(Ops); |
| 3131 | } |
| 3132 | |
| 3133 | // Okay, check to see if the same value occurs in the operand list twice. If |
| 3134 | // so, delete one. Since we sorted the list, these values are required to |
| 3135 | // be adjacent. |
| 3136 | for (unsigned i = 0, e = Ops.size()-1; i != e; ++i) |
Dan Gohman | 7ef0dc2 | 2010-04-13 16:51:03 +0000 | [diff] [blame] | 3137 | // X umax Y umax Y --> X umax Y |
| 3138 | // X umax Y --> X, if X is always greater than or equal to Y
| 3139 | if (Ops[i] == Ops[i+1] || |
| 3140 | isKnownPredicate(ICmpInst::ICMP_UGE, Ops[i], Ops[i+1])) { |
| 3141 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+2); |
| 3142 | --i; --e; |
| 3143 | } else if (isKnownPredicate(ICmpInst::ICMP_ULE, Ops[i], Ops[i+1])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3144 | Ops.erase(Ops.begin()+i, Ops.begin()+i+1); |
| 3145 | --i; --e; |
| 3146 | } |
| 3147 | |
| 3148 | if (Ops.size() == 1) return Ops[0]; |
| 3149 | |
| 3150 | assert(!Ops.empty() && "Reduced umax down to nothing!"); |
| 3151 | |
| 3152 | // Okay, it looks like we really DO need a umax expr. Check to see if we |
| 3153 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3154 | FoldingSetNodeID ID; |
| 3155 | ID.AddInteger(scUMaxExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3156 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 3157 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3158 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3159 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 3160 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 3161 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3162 | SCEV *S = new (SCEVAllocator) SCEVUMaxExpr(ID.Intern(SCEVAllocator), |
| 3163 | O, Ops.size()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3164 | UniqueSCEVs.InsertNode(S, IP); |
| 3165 | return S; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3166 | } |
| 3167 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3168 | const SCEV *ScalarEvolution::getSMinExpr(const SCEV *LHS, |
| 3169 | const SCEV *RHS) { |
Dan Gohman | 692b468 | 2009-06-22 03:18:45 +0000 | [diff] [blame] | 3170 | // ~smax(~x, ~y) == smin(x, y). |
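  // This holds because ~z = -1 - z reverses the signed order, so taking the
  // smax of the complements and complementing the result yields the smin.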
| 3171 | return getNotSCEV(getSMaxExpr(getNotSCEV(LHS), getNotSCEV(RHS))); |
| 3172 | } |
| 3173 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3174 | const SCEV *ScalarEvolution::getUMinExpr(const SCEV *LHS, |
| 3175 | const SCEV *RHS) { |
Dan Gohman | 692b468 | 2009-06-22 03:18:45 +0000 | [diff] [blame] | 3176 | // ~umax(~x, ~y) == umin(x, y) |
| 3177 | return getNotSCEV(getUMaxExpr(getNotSCEV(LHS), getNotSCEV(RHS))); |
| 3178 | } |
| 3179 | |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3180 | const SCEV *ScalarEvolution::getSizeOfExpr(Type *IntTy, Type *AllocTy) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3181 | // We can bypass creating a target-independent |
Dan Gohman | 11862a6 | 2010-04-12 23:03:26 +0000 | [diff] [blame] | 3182 | // constant expression and then folding it back into a ConstantInt. |
| 3183 | // This is just a compile-time optimization. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3184 | return getConstant(IntTy, |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3185 | F.getParent()->getDataLayout().getTypeAllocSize(AllocTy)); |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 3186 | } |
| 3187 | |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3188 | const SCEV *ScalarEvolution::getOffsetOfExpr(Type *IntTy, |
| 3189 | StructType *STy, |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 3190 | unsigned FieldNo) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3191 | // We can bypass creating a target-independent |
Dan Gohman | 11862a6 | 2010-04-12 23:03:26 +0000 | [diff] [blame] | 3192 | // constant expression and then folding it back into a ConstantInt. |
| 3193 | // This is just a compile-time optimization. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3194 | return getConstant( |
| 3195 | IntTy, |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3196 | F.getParent()->getDataLayout().getStructLayout(STy)->getElementOffset( |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3197 | FieldNo)); |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3198 | } |
| 3199 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3200 | const SCEV *ScalarEvolution::getUnknown(Value *V) { |
Dan Gohman | f436bac | 2009-06-24 00:54:57 +0000 | [diff] [blame] | 3201 | // Don't attempt to do anything other than create a SCEVUnknown object |
| 3202 | // here. createSCEV only calls getUnknown after checking for all other |
| 3203 | // interesting possibilities, and any other code that calls getUnknown |
| 3204 | // is doing so in order to hide a value from SCEV canonicalization. |
| 3205 | |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3206 | FoldingSetNodeID ID; |
| 3207 | ID.AddInteger(scUnknown); |
| 3208 | ID.AddPointer(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3209 | void *IP = nullptr; |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 3210 | if (SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) { |
| 3211 | assert(cast<SCEVUnknown>(S)->getValue() == V && |
| 3212 | "Stale SCEVUnknown in uniquing map!"); |
| 3213 | return S; |
| 3214 | } |
| 3215 | SCEV *S = new (SCEVAllocator) SCEVUnknown(ID.Intern(SCEVAllocator), V, this, |
| 3216 | FirstUnknown); |
| 3217 | FirstUnknown = cast<SCEVUnknown>(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3218 | UniqueSCEVs.InsertNode(S, IP); |
| 3219 | return S; |
Chris Lattner | b4f681b | 2004-04-15 15:07:24 +0000 | [diff] [blame] | 3220 | } |
| 3221 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3222 | //===----------------------------------------------------------------------===// |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3223 | // Basic SCEV Analysis and PHI Idiom Recognition Code |
| 3224 | // |
| 3225 | |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3226 | /// isSCEVable - Test if values of the given type are analyzable within |
| 3227 | /// the SCEV framework. This primarily includes integer types, and it
| 3228 | /// also includes pointer types, since the module's DataLayout makes the
| 3229 | /// pointer size available.
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3230 | bool ScalarEvolution::isSCEVable(Type *Ty) const { |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3231 | // Integers and pointers are always SCEVable. |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3232 | return Ty->isIntegerTy() || Ty->isPointerTy(); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3233 | } |
| 3234 | |
| 3235 | /// getTypeSizeInBits - Return the size in bits of the specified type, |
| 3236 | /// for which isSCEVable must return true. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3237 | uint64_t ScalarEvolution::getTypeSizeInBits(Type *Ty) const { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3238 | assert(isSCEVable(Ty) && "Type is not SCEVable!"); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3239 | return F.getParent()->getDataLayout().getTypeSizeInBits(Ty); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3240 | } |
| 3241 | |
| 3242 | /// getEffectiveSCEVType - Return a type with the same bitwidth as |
| 3243 | /// the given type and which represents how SCEV will treat the given |
| 3244 | /// type, for which isSCEVable must return true. For pointer types, |
| 3245 | /// this is the pointer-sized integer type. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3246 | Type *ScalarEvolution::getEffectiveSCEVType(Type *Ty) const { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3247 | assert(isSCEVable(Ty) && "Type is not SCEVable!"); |
| 3248 | |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3249 | if (Ty->isIntegerTy()) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3250 | return Ty; |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3251 | } |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3252 | |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3253 | // The only other supported type is pointer.
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3254 | assert(Ty->isPointerTy() && "Unexpected non-pointer non-integer type!"); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3255 | return F.getParent()->getDataLayout().getIntPtrType(Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3256 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3257 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3258 | const SCEV *ScalarEvolution::getCouldNotCompute() { |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3259 | return CouldNotCompute.get(); |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 3260 | } |
| 3261 | |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3262 | namespace { |
| 3263 | // Helper class working with SCEVTraversal to figure out if a SCEV contains |
| 3264 | // a SCEVUnknown with null value-pointer. FindInvalidSCEVUnknown::FindOne |
| 3265 | // is set iff we find such a SCEVUnknown.
| 3266 | // |
| 3267 | struct FindInvalidSCEVUnknown { |
| 3268 | bool FindOne; |
| 3269 | FindInvalidSCEVUnknown() { FindOne = false; } |
| 3270 | bool follow(const SCEV *S) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 3271 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3272 | case scConstant: |
| 3273 | return false; |
| 3274 | case scUnknown: |
Shuxin Yang | 23773b3 | 2013-07-12 07:25:38 +0000 | [diff] [blame] | 3275 | if (!cast<SCEVUnknown>(S)->getValue()) |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3276 | FindOne = true; |
| 3277 | return false; |
| 3278 | default: |
| 3279 | return true; |
| 3280 | } |
| 3281 | } |
| 3282 | bool isDone() const { return FindOne; } |
| 3283 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 3284 | } |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3285 | |
| 3286 | bool ScalarEvolution::checkValidity(const SCEV *S) const { |
| 3287 | FindInvalidSCEVUnknown F; |
| 3288 | SCEVTraversal<FindInvalidSCEVUnknown> ST(F); |
| 3289 | ST.visitAll(S); |
| 3290 | |
| 3291 | return !F.FindOne; |
| 3292 | } |
| 3293 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3294 | /// getSCEV - Return an existing SCEV if it exists, otherwise analyze the |
| 3295 | /// expression and create a new one. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3296 | const SCEV *ScalarEvolution::getSCEV(Value *V) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3297 | assert(isSCEVable(V->getType()) && "Value is not SCEVable!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3298 | |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3299 | const SCEV *S = getExistingSCEV(V); |
| 3300 | if (S == nullptr) { |
| 3301 | S = createSCEV(V); |
| 3302 | ValueExprMap.insert(std::make_pair(SCEVCallbackVH(V, this), S)); |
| 3303 | } |
| 3304 | return S; |
| 3305 | } |
| 3306 | |
| 3307 | const SCEV *ScalarEvolution::getExistingSCEV(Value *V) { |
| 3308 | assert(isSCEVable(V->getType()) && "Value is not SCEVable!"); |
| 3309 | |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3310 | ValueExprMapType::iterator I = ValueExprMap.find_as(V); |
| 3311 | if (I != ValueExprMap.end()) { |
| 3312 | const SCEV *S = I->second; |
Shuxin Yang | 23773b3 | 2013-07-12 07:25:38 +0000 | [diff] [blame] | 3313 | if (checkValidity(S)) |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3314 | return S; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3315 | ValueExprMap.erase(I); |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3316 | } |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3317 | return nullptr; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3318 | } |
| 3319 | |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3320 | /// getNegativeSCEV - Return a SCEV corresponding to -V = -1*V |
| 3321 | /// |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3322 | const SCEV *ScalarEvolution::getNegativeSCEV(const SCEV *V, |
| 3323 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3324 | if (const SCEVConstant *VC = dyn_cast<SCEVConstant>(V)) |
Owen Anderson | 53a5221 | 2009-07-13 04:09:18 +0000 | [diff] [blame] | 3325 | return getConstant( |
Owen Anderson | 487375e | 2009-07-29 18:55:55 +0000 | [diff] [blame] | 3326 | cast<ConstantInt>(ConstantExpr::getNeg(VC->getValue()))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3327 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3328 | Type *Ty = V->getType(); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3329 | Ty = getEffectiveSCEVType(Ty); |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3330 | return getMulExpr( |
| 3331 | V, getConstant(cast<ConstantInt>(Constant::getAllOnesValue(Ty))), Flags); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3332 | } |
| 3333 | |
| 3334 | /// getNotSCEV - Return a SCEV corresponding to ~V = -1-V |
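/// (equivalently, the two's-complement identity ~V == -V - 1; e.g. ~5 == -6).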
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3335 | const SCEV *ScalarEvolution::getNotSCEV(const SCEV *V) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3336 | if (const SCEVConstant *VC = dyn_cast<SCEVConstant>(V)) |
Owen Anderson | 542619e | 2009-07-13 20:58:05 +0000 | [diff] [blame] | 3337 | return getConstant( |
Owen Anderson | 487375e | 2009-07-29 18:55:55 +0000 | [diff] [blame] | 3338 | cast<ConstantInt>(ConstantExpr::getNot(VC->getValue()))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3339 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3340 | Type *Ty = V->getType(); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3341 | Ty = getEffectiveSCEVType(Ty); |
Owen Anderson | 542619e | 2009-07-13 20:58:05 +0000 | [diff] [blame] | 3342 | const SCEV *AllOnes = |
Owen Anderson | 5a1acd9 | 2009-07-31 20:28:14 +0000 | [diff] [blame] | 3343 | getConstant(cast<ConstantInt>(Constant::getAllOnesValue(Ty))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3344 | return getMinusSCEV(AllOnes, V); |
| 3345 | } |
| 3346 | |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3347 | /// getMinusSCEV - Return LHS-RHS. Minus is represented in SCEV as A+B*-1. |
Chris Lattner | fc87752 | 2011-01-09 22:26:35 +0000 | [diff] [blame] | 3348 | const SCEV *ScalarEvolution::getMinusSCEV(const SCEV *LHS, const SCEV *RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3349 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | 46f00a2 | 2010-07-20 16:53:00 +0000 | [diff] [blame] | 3350 | // Fast path: X - X --> 0. |
| 3351 | if (LHS == RHS) |
| 3352 | return getConstant(LHS->getType(), 0); |
| 3353 | |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3354 | // We represent LHS - RHS as LHS + (-1)*RHS. This transformation |
| 3355 | // makes it so that we cannot make much use of NUW. |
| 3356 | auto AddFlags = SCEV::FlagAnyWrap; |
| 3357 | const bool RHSIsNotMinSigned = |
| 3358 | !getSignedRange(RHS).getSignedMin().isMinSignedValue(); |
| 3359 | if (maskFlags(Flags, SCEV::FlagNSW) == SCEV::FlagNSW) { |
| 3360 | // Let M be the minimum representable signed value. Then (-1)*RHS |
| 3361 | // signed-wraps if and only if RHS is M. That can happen even for |
| 3362 | // a NSW subtraction because e.g. (-1)*M signed-wraps even though |
| 3363 | // -1 - M does not. So to transfer NSW from LHS - RHS to LHS + |
| 3364 | // (-1)*RHS, we need to prove that RHS != M. |
| 3365 | // |
| 3366 | // If LHS is non-negative and we know that LHS - RHS does not |
| 3367 | // signed-wrap, then RHS cannot be M. So we can rule out signed-wrap |
| 3368 | // either by proving that RHS > M or that LHS >= 0. |
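    // For example, with i8 operands: if LHS >= 0 and RHS were M == -128, then
    // LHS - RHS >= 128 would already signed-wrap, contradicting the NSW
    // assumption on the subtraction; hence RHS != M and (-1)*RHS cannot wrap.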
| 3369 | if (RHSIsNotMinSigned || isKnownNonNegative(LHS)) { |
| 3370 | AddFlags = SCEV::FlagNSW; |
| 3371 | } |
| 3372 | } |
| 3373 | |
| 3374 | // FIXME: Find a correct way to transfer NSW to (-1)*M when LHS - |
| 3375 | // RHS is NSW and LHS >= 0. |
| 3376 | // |
| 3377 | // The difficulty here is that the NSW flag may have been proven |
 | 3378 |   // relative to a loop that appears in a recurrence in LHS but not in
 | 3379 |   // RHS. Applying NSW to (-1)*M may then give the NSW flag a
| 3380 | // larger scope than intended. |
| 3381 | auto NegFlags = RHSIsNotMinSigned ? SCEV::FlagNSW : SCEV::FlagAnyWrap; |
| 3382 | |
| 3383 | return getAddExpr(LHS, getNegativeSCEV(RHS, NegFlags), AddFlags); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3384 | } |
| 3385 | |
| 3386 | /// getTruncateOrZeroExtend - Return a SCEV corresponding to a conversion of the |
| 3387 | /// input value to the specified type. If the type must be extended, it is zero |
| 3388 | /// extended. |
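/// For example, with an i32 result type an i64 input is truncated, an i16
/// input is zero-extended, and an i32 input is returned unchanged.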
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3389 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3390 | ScalarEvolution::getTruncateOrZeroExtend(const SCEV *V, Type *Ty) { |
| 3391 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3392 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3393 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3394 | "Cannot truncate or zero extend with non-integer arguments!"); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3395 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3396 | return V; // No conversion |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3397 | if (getTypeSizeInBits(SrcTy) > getTypeSizeInBits(Ty)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3398 | return getTruncateExpr(V, Ty); |
| 3399 | return getZeroExtendExpr(V, Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3400 | } |
| 3401 | |
| 3402 | /// getTruncateOrSignExtend - Return a SCEV corresponding to a conversion of the |
| 3403 | /// input value to the specified type. If the type must be extended, it is sign |
| 3404 | /// extended. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3405 | const SCEV * |
| 3406 | ScalarEvolution::getTruncateOrSignExtend(const SCEV *V, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3407 | Type *Ty) { |
| 3408 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3409 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3410 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3411 |          "Cannot truncate or sign extend with non-integer arguments!");
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3412 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3413 | return V; // No conversion |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3414 | if (getTypeSizeInBits(SrcTy) > getTypeSizeInBits(Ty)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3415 | return getTruncateExpr(V, Ty); |
| 3416 | return getSignExtendExpr(V, Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3417 | } |
| 3418 | |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3419 | /// getNoopOrZeroExtend - Return a SCEV corresponding to a conversion of the |
| 3420 | /// input value to the specified type. If the type must be extended, it is zero |
| 3421 | /// extended. The conversion must not be narrowing. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3422 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3423 | ScalarEvolution::getNoopOrZeroExtend(const SCEV *V, Type *Ty) { |
| 3424 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3425 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3426 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3427 | "Cannot noop or zero extend with non-integer arguments!"); |
| 3428 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3429 | "getNoopOrZeroExtend cannot truncate!"); |
| 3430 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3431 | return V; // No conversion |
| 3432 | return getZeroExtendExpr(V, Ty); |
| 3433 | } |
| 3434 | |
| 3435 | /// getNoopOrSignExtend - Return a SCEV corresponding to a conversion of the |
| 3436 | /// input value to the specified type. If the type must be extended, it is sign |
| 3437 | /// extended. The conversion must not be narrowing. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3438 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3439 | ScalarEvolution::getNoopOrSignExtend(const SCEV *V, Type *Ty) { |
| 3440 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3441 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3442 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3443 | "Cannot noop or sign extend with non-integer arguments!"); |
| 3444 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3445 | "getNoopOrSignExtend cannot truncate!"); |
| 3446 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3447 | return V; // No conversion |
| 3448 | return getSignExtendExpr(V, Ty); |
| 3449 | } |
| 3450 | |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 3451 | /// getNoopOrAnyExtend - Return a SCEV corresponding to a conversion of |
| 3452 | /// the input value to the specified type. If the type must be extended, |
| 3453 | /// it is extended with unspecified bits. The conversion must not be |
| 3454 | /// narrowing. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3455 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3456 | ScalarEvolution::getNoopOrAnyExtend(const SCEV *V, Type *Ty) { |
| 3457 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3458 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3459 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 3460 | "Cannot noop or any extend with non-integer arguments!"); |
| 3461 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3462 | "getNoopOrAnyExtend cannot truncate!"); |
| 3463 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3464 | return V; // No conversion |
| 3465 | return getAnyExtendExpr(V, Ty); |
| 3466 | } |
| 3467 | |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3468 | /// getTruncateOrNoop - Return a SCEV corresponding to a conversion of the |
| 3469 | /// input value to the specified type. The conversion must not be widening. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3470 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3471 | ScalarEvolution::getTruncateOrNoop(const SCEV *V, Type *Ty) { |
| 3472 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3473 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3474 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3475 | "Cannot truncate or noop with non-integer arguments!"); |
| 3476 | assert(getTypeSizeInBits(SrcTy) >= getTypeSizeInBits(Ty) && |
| 3477 | "getTruncateOrNoop cannot extend!"); |
| 3478 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3479 | return V; // No conversion |
| 3480 | return getTruncateExpr(V, Ty); |
| 3481 | } |
| 3482 | |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 3483 | /// getUMaxFromMismatchedTypes - Promote the operands to the wider of |
| 3484 | /// the types using zero-extension, and then perform a umax operation |
| 3485 | /// with them. |
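/// For example, the umax of an i32 and an i64 operand is formed by first
/// zero-extending the i32 operand to i64.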
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3486 | const SCEV *ScalarEvolution::getUMaxFromMismatchedTypes(const SCEV *LHS, |
| 3487 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3488 | const SCEV *PromotedLHS = LHS; |
| 3489 | const SCEV *PromotedRHS = RHS; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 3490 | |
| 3491 | if (getTypeSizeInBits(LHS->getType()) > getTypeSizeInBits(RHS->getType())) |
| 3492 | PromotedRHS = getZeroExtendExpr(RHS, LHS->getType()); |
| 3493 | else |
| 3494 | PromotedLHS = getNoopOrZeroExtend(LHS, RHS->getType()); |
| 3495 | |
| 3496 | return getUMaxExpr(PromotedLHS, PromotedRHS); |
| 3497 | } |
| 3498 | |
Dan Gohman | 2bc2230 | 2009-06-22 15:03:27 +0000 | [diff] [blame] | 3499 | /// getUMinFromMismatchedTypes - Promote the operands to the wider of |
| 3500 | /// the types using zero-extension, and then perform a umin operation |
| 3501 | /// with them. |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3502 | const SCEV *ScalarEvolution::getUMinFromMismatchedTypes(const SCEV *LHS, |
| 3503 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3504 | const SCEV *PromotedLHS = LHS; |
| 3505 | const SCEV *PromotedRHS = RHS; |
Dan Gohman | 2bc2230 | 2009-06-22 15:03:27 +0000 | [diff] [blame] | 3506 | |
| 3507 | if (getTypeSizeInBits(LHS->getType()) > getTypeSizeInBits(RHS->getType())) |
| 3508 | PromotedRHS = getZeroExtendExpr(RHS, LHS->getType()); |
| 3509 | else |
| 3510 | PromotedLHS = getNoopOrZeroExtend(LHS, RHS->getType()); |
| 3511 | |
| 3512 | return getUMinExpr(PromotedLHS, PromotedRHS); |
| 3513 | } |
| 3514 | |
Andrew Trick | 87716c9 | 2011-03-17 23:51:11 +0000 | [diff] [blame] | 3515 | /// getPointerBase - Transitively follow the chain of pointer-type operands |
| 3516 | /// until reaching a SCEV that does not have a single pointer operand. This |
| 3517 | /// returns a SCEVUnknown pointer for well-formed pointer-type expressions, |
| 3518 | /// but corner cases do exist. |
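/// For example, for a SCEV of the form ((4 * %i) + %ptr) this returns the
/// SCEVUnknown for %ptr.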
| 3519 | const SCEV *ScalarEvolution::getPointerBase(const SCEV *V) { |
| 3520 | // A pointer operand may evaluate to a nonpointer expression, such as null. |
| 3521 | if (!V->getType()->isPointerTy()) |
| 3522 | return V; |
| 3523 | |
| 3524 | if (const SCEVCastExpr *Cast = dyn_cast<SCEVCastExpr>(V)) { |
| 3525 | return getPointerBase(Cast->getOperand()); |
| 3526 | } |
| 3527 | else if (const SCEVNAryExpr *NAry = dyn_cast<SCEVNAryExpr>(V)) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3528 | const SCEV *PtrOp = nullptr; |
Andrew Trick | 87716c9 | 2011-03-17 23:51:11 +0000 | [diff] [blame] | 3529 | for (SCEVNAryExpr::op_iterator I = NAry->op_begin(), E = NAry->op_end(); |
| 3530 | I != E; ++I) { |
| 3531 | if ((*I)->getType()->isPointerTy()) { |
| 3532 | // Cannot find the base of an expression with multiple pointer operands. |
| 3533 | if (PtrOp) |
| 3534 | return V; |
| 3535 | PtrOp = *I; |
| 3536 | } |
| 3537 | } |
| 3538 | if (!PtrOp) |
| 3539 | return V; |
| 3540 | return getPointerBase(PtrOp); |
| 3541 | } |
| 3542 | return V; |
| 3543 | } |
| 3544 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3545 | /// PushDefUseChildren - Push users of the given Instruction |
| 3546 | /// onto the given Worklist. |
| 3547 | static void |
| 3548 | PushDefUseChildren(Instruction *I, |
| 3549 | SmallVectorImpl<Instruction *> &Worklist) { |
| 3550 | // Push the def-use children onto the Worklist stack. |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 3551 | for (User *U : I->users()) |
| 3552 | Worklist.push_back(cast<Instruction>(U)); |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3553 | } |
| 3554 | |
 | 3555 | /// ForgetSymbolicName - This looks up computed SCEV values for all
| 3556 | /// instructions that depend on the given instruction and removes them from |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3557 | /// the ValueExprMap if they reference SymName. This is used during PHI
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3558 | /// resolution. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 3559 | void |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3560 | ScalarEvolution::ForgetSymbolicName(Instruction *PN, const SCEV *SymName) { |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3561 | SmallVector<Instruction *, 16> Worklist; |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3562 | PushDefUseChildren(PN, Worklist); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3563 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3564 | SmallPtrSet<Instruction *, 8> Visited; |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3565 | Visited.insert(PN); |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3566 | while (!Worklist.empty()) { |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3567 | Instruction *I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 3568 | if (!Visited.insert(I).second) |
| 3569 | continue; |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 3570 | |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3571 | ValueExprMapType::iterator It = |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 3572 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3573 | if (It != ValueExprMap.end()) { |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 3574 | const SCEV *Old = It->second; |
| 3575 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3576 | // Short-circuit the def-use traversal if the symbolic name |
| 3577 | // ceases to appear in expressions. |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 3578 | if (Old != SymName && !hasOperand(Old, SymName)) |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3579 | continue; |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 3580 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3581 | // SCEVUnknown for a PHI either means that it has an unrecognized |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3582 |       // structure, that it's a PHI in the process of being computed by
 | 3583 |       // createNodeForPHI, or that it's a single-value PHI. In the first case,
| 3584 | // additional loop trip count information isn't going to change anything. |
| 3585 | // In the second case, createNodeForPHI will perform the necessary |
| 3586 | // updates on its own when it gets to that point. In the third, we do |
| 3587 | // want to forget the SCEVUnknown. |
| 3588 | if (!isa<PHINode>(I) || |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 3589 | !isa<SCEVUnknown>(Old) || |
| 3590 | (I != PN && Old == SymName)) { |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 3591 | forgetMemoizedResults(Old); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3592 | ValueExprMap.erase(It); |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 3593 | } |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3594 | } |
| 3595 | |
| 3596 | PushDefUseChildren(I, Worklist); |
| 3597 | } |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 3598 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3599 | |
| 3600 | /// createNodeForPHI - PHI nodes have two cases. Either the PHI node exists in |
| 3601 | /// a loop header, making it a potential recurrence, or it doesn't. |
| 3602 | /// |
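/// For example, the canonical induction variable
///   %iv = phi i32 [ 0, %entry ], [ %iv.next, %loop ]
///   %iv.next = add nsw i32 %iv, 1
/// is recognized here as the affine add recurrence {0,+,1}<nsw><%loop>.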
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3603 | const SCEV *ScalarEvolution::createNodeForPHI(PHINode *PN) { |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3604 | if (const Loop *L = LI.getLoopFor(PN->getParent())) |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3605 | if (L->getHeader() == PN->getParent()) { |
| 3606 | // The loop may have multiple entrances or multiple exits; we can analyze |
| 3607 | // this phi as an addrec if it has a unique entry value and a unique |
| 3608 | // backedge value. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3609 | Value *BEValueV = nullptr, *StartValueV = nullptr; |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3610 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { |
| 3611 | Value *V = PN->getIncomingValue(i); |
| 3612 | if (L->contains(PN->getIncomingBlock(i))) { |
| 3613 | if (!BEValueV) { |
| 3614 | BEValueV = V; |
| 3615 | } else if (BEValueV != V) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3616 | BEValueV = nullptr; |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3617 | break; |
| 3618 | } |
| 3619 | } else if (!StartValueV) { |
| 3620 | StartValueV = V; |
| 3621 | } else if (StartValueV != V) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3622 | StartValueV = nullptr; |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3623 | break; |
| 3624 | } |
| 3625 | } |
| 3626 | if (BEValueV && StartValueV) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3627 | // While we are analyzing this PHI node, handle its value symbolically. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3628 | const SCEV *SymbolicName = getUnknown(PN); |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 3629 | assert(ValueExprMap.find_as(PN) == ValueExprMap.end() && |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3630 | "PHI node already processed?"); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3631 | ValueExprMap.insert(std::make_pair(SCEVCallbackVH(PN, this), SymbolicName)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3632 | |
| 3633 | // Using this symbolic name for the PHI, analyze the value coming around |
| 3634 | // the back-edge. |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3635 | const SCEV *BEValue = getSCEV(BEValueV); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3636 | |
| 3637 | // NOTE: If BEValue is loop invariant, we know that the PHI node just |
| 3638 | // has a special value for the first iteration of the loop. |
| 3639 | |
| 3640 | // If the value coming around the backedge is an add with the symbolic |
| 3641 | // value we just inserted, then we found a simple induction variable! |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3642 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(BEValue)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3643 | // If there is a single occurrence of the symbolic value, replace it |
| 3644 | // with a recurrence. |
| 3645 | unsigned FoundIndex = Add->getNumOperands(); |
| 3646 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
| 3647 | if (Add->getOperand(i) == SymbolicName) |
| 3648 | if (FoundIndex == e) { |
| 3649 | FoundIndex = i; |
| 3650 | break; |
| 3651 | } |
| 3652 | |
| 3653 | if (FoundIndex != Add->getNumOperands()) { |
| 3654 | // Create an add with everything but the specified operand. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3655 | SmallVector<const SCEV *, 8> Ops; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3656 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
| 3657 | if (i != FoundIndex) |
| 3658 | Ops.push_back(Add->getOperand(i)); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3659 | const SCEV *Accum = getAddExpr(Ops); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3660 | |
| 3661 | // This is not a valid addrec if the step amount is varying each |
| 3662 | // loop iteration, but is not itself an addrec in this loop. |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 3663 | if (isLoopInvariant(Accum, L) || |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3664 | (isa<SCEVAddRecExpr>(Accum) && |
| 3665 | cast<SCEVAddRecExpr>(Accum)->getLoop() == L)) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3666 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3667 | |
| 3668 | // If the increment doesn't overflow, then neither the addrec nor |
| 3669 | // the post-increment will overflow. |
| 3670 | if (const AddOperator *OBO = dyn_cast<AddOperator>(BEValueV)) { |
Nick Lewycky | b6ef9a1 | 2015-03-13 01:37:52 +0000 | [diff] [blame] | 3671 | if (OBO->getOperand(0) == PN) { |
| 3672 | if (OBO->hasNoUnsignedWrap()) |
| 3673 | Flags = setFlags(Flags, SCEV::FlagNUW); |
| 3674 | if (OBO->hasNoSignedWrap()) |
| 3675 | Flags = setFlags(Flags, SCEV::FlagNSW); |
| 3676 | } |
Benjamin Kramer | 6094f30 | 2013-10-28 07:30:06 +0000 | [diff] [blame] | 3677 | } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(BEValueV)) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3678 | // If the increment is an inbounds GEP, then we know the address |
| 3679 | // space cannot be wrapped around. We cannot make any guarantee |
| 3680 | // about signed or unsigned overflow because pointers are |
| 3681 | // unsigned but we may have a negative index from the base |
Benjamin Kramer | 6094f30 | 2013-10-28 07:30:06 +0000 | [diff] [blame] | 3682 | // pointer. We can guarantee that no unsigned wrap occurs if the |
| 3683 | // indices form a positive value. |
Nick Lewycky | b6ef9a1 | 2015-03-13 01:37:52 +0000 | [diff] [blame] | 3684 | if (GEP->isInBounds() && GEP->getOperand(0) == PN) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 3685 | Flags = setFlags(Flags, SCEV::FlagNW); |
Benjamin Kramer | 6094f30 | 2013-10-28 07:30:06 +0000 | [diff] [blame] | 3686 | |
| 3687 | const SCEV *Ptr = getSCEV(GEP->getPointerOperand()); |
| 3688 | if (isKnownPositive(getMinusSCEV(getSCEV(GEP), Ptr))) |
| 3689 | Flags = setFlags(Flags, SCEV::FlagNUW); |
| 3690 | } |
Sanjoy Das | cb47366 | 2015-01-22 00:48:47 +0000 | [diff] [blame] | 3691 | |
| 3692 | // We cannot transfer nuw and nsw flags from subtraction |
| 3693 | // operations -- sub nuw X, Y is not the same as add nuw X, -Y |
| 3694 | // for instance. |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3695 | } |
| 3696 | |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3697 | const SCEV *StartVal = getSCEV(StartValueV); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3698 | const SCEV *PHISCEV = getAddRecExpr(StartVal, Accum, L, Flags); |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 3699 | |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3700 | // Since the no-wrap flags are on the increment, they apply to the |
| 3701 | // post-incremented value as well. |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 3702 | if (isLoopInvariant(Accum, L)) |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3703 | (void)getAddRecExpr(getAddExpr(StartVal, Accum), |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3704 | Accum, L, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3705 | |
| 3706 | // Okay, for the entire analysis of this edge we assumed the PHI |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3707 | // to be symbolic. We now need to go back and purge all of the |
| 3708 | // entries for the scalars that use the symbolic expression. |
| 3709 | ForgetSymbolicName(PN, SymbolicName); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3710 | ValueExprMap[SCEVCallbackVH(PN, this)] = PHISCEV; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3711 | return PHISCEV; |
| 3712 | } |
| 3713 | } |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3714 | } else if (const SCEVAddRecExpr *AddRec = |
| 3715 | dyn_cast<SCEVAddRecExpr>(BEValue)) { |
Chris Lattner | e8cbdbf | 2006-04-26 18:34:07 +0000 | [diff] [blame] | 3716 | // Otherwise, this could be a loop like this: |
| 3717 | // i = 0; for (j = 1; ..; ++j) { .... i = j; } |
| 3718 | // In this case, j = {1,+,1} and BEValue is j. |
| 3719 | // Because the other in-value of i (0) fits the evolution of BEValue |
| 3720 | // i really is an addrec evolution. |
| 3721 | if (AddRec->getLoop() == L && AddRec->isAffine()) { |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3722 | const SCEV *StartVal = getSCEV(StartValueV); |
Chris Lattner | e8cbdbf | 2006-04-26 18:34:07 +0000 | [diff] [blame] | 3723 | |
| 3724 | // If StartVal = j.start - j.stride, we can use StartVal as the |
 | 3725 |         // starting value of the addrec evolution.
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3726 | if (StartVal == getMinusSCEV(AddRec->getOperand(0), |
Dan Gohman | 068b793 | 2010-04-11 23:44:58 +0000 | [diff] [blame] | 3727 | AddRec->getOperand(1))) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3728 | // FIXME: For constant StartVal, we should be able to infer |
| 3729 | // no-wrap flags. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3730 | const SCEV *PHISCEV = |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3731 | getAddRecExpr(StartVal, AddRec->getOperand(1), L, |
| 3732 | SCEV::FlagAnyWrap); |
Chris Lattner | e8cbdbf | 2006-04-26 18:34:07 +0000 | [diff] [blame] | 3733 | |
| 3734 | // Okay, for the entire analysis of this edge we assumed the PHI |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 3735 | // to be symbolic. We now need to go back and purge all of the |
| 3736 | // entries for the scalars that use the symbolic expression. |
| 3737 | ForgetSymbolicName(PN, SymbolicName); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 3738 | ValueExprMap[SCEVCallbackVH(PN, this)] = PHISCEV; |
Chris Lattner | e8cbdbf | 2006-04-26 18:34:07 +0000 | [diff] [blame] | 3739 | return PHISCEV; |
| 3740 | } |
| 3741 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3742 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3743 | } |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 3744 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 3745 | |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3746 | // If the PHI has a single incoming value, follow that value, unless the |
| 3747 | // PHI's incoming blocks are in a different loop, in which case doing so |
| 3748 | // risks breaking LCSSA form. Instcombine would normally zap these, but |
| 3749 | // it doesn't have DominatorTree information, so it may miss cases. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3750 | if (Value *V = SimplifyInstruction(PN, F.getParent()->getDataLayout(), &TLI, |
| 3751 | &DT, &AC)) |
| 3752 | if (LI.replacementPreservesLCSSAForm(PN, V)) |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 3753 | return getSCEV(V); |
Duncan Sands | 39d77131 | 2010-11-17 20:49:12 +0000 | [diff] [blame] | 3754 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3755 | // If it's not a loop phi, we can't handle it yet. |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3756 | return getUnknown(PN); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3757 | } |
| 3758 | |
Dan Gohman | ee750d1 | 2009-05-08 20:26:55 +0000 | [diff] [blame] | 3759 | /// createNodeForGEP - Expand GEP instructions into add and multiply |
| 3760 | /// operations. This allows them to be analyzed by regular SCEV code. |
| 3761 | /// |
Dan Gohman | b256ccf | 2009-12-18 02:09:29 +0000 | [diff] [blame] | 3762 | const SCEV *ScalarEvolution::createNodeForGEP(GEPOperator *GEP) { |
Dan Gohman | 2173bd3 | 2009-05-08 20:36:47 +0000 | [diff] [blame] | 3763 | Value *Base = GEP->getOperand(0); |
Dan Gohman | 30f24fe | 2009-05-09 00:14:52 +0000 | [diff] [blame] | 3764 | // Don't attempt to analyze GEPs over unsized objects. |
Matt Arsenault | 404c60a | 2013-10-21 19:43:56 +0000 | [diff] [blame] | 3765 | if (!Base->getType()->getPointerElementType()->isSized()) |
Dan Gohman | 30f24fe | 2009-05-09 00:14:52 +0000 | [diff] [blame] | 3766 | return getUnknown(GEP); |
Matt Arsenault | 4c26590 | 2013-09-27 22:38:23 +0000 | [diff] [blame] | 3767 | |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3768 | SmallVector<const SCEV *, 4> IndexExprs; |
| 3769 | for (auto Index = GEP->idx_begin(); Index != GEP->idx_end(); ++Index) |
| 3770 | IndexExprs.push_back(getSCEV(*Index)); |
| 3771 | return getGEPExpr(GEP->getSourceElementType(), getSCEV(Base), IndexExprs, |
| 3772 | GEP->isInBounds()); |
Dan Gohman | ee750d1 | 2009-05-08 20:26:55 +0000 | [diff] [blame] | 3773 | } |
| 3774 | |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3775 | /// GetMinTrailingZeros - Determine the minimum number of zero bits that S is |
| 3776 | /// guaranteed to end in (at every loop iteration). It is, at the same time, |
| 3777 | /// the minimum number of times S is divisible by 2. For example, given {4,+,8} |
| 3778 | /// it returns 2. If S is guaranteed to be 0, it returns the bitwidth of S. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3779 | uint32_t |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3780 | ScalarEvolution::GetMinTrailingZeros(const SCEV *S) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3781 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) |
Chris Lattner | 69ec1ec | 2007-11-23 22:36:49 +0000 | [diff] [blame] | 3782 | return C->getValue()->getValue().countTrailingZeros(); |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 3783 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3784 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(S)) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3785 | return std::min(GetMinTrailingZeros(T->getOperand()), |
| 3786 | (uint32_t)getTypeSizeInBits(T->getType())); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3787 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3788 | if (const SCEVZeroExtendExpr *E = dyn_cast<SCEVZeroExtendExpr>(S)) { |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3789 | uint32_t OpRes = GetMinTrailingZeros(E->getOperand()); |
| 3790 | return OpRes == getTypeSizeInBits(E->getOperand()->getType()) ? |
| 3791 | getTypeSizeInBits(E->getType()) : OpRes; |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3792 | } |
| 3793 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3794 | if (const SCEVSignExtendExpr *E = dyn_cast<SCEVSignExtendExpr>(S)) { |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3795 | uint32_t OpRes = GetMinTrailingZeros(E->getOperand()); |
| 3796 | return OpRes == getTypeSizeInBits(E->getOperand()->getType()) ? |
| 3797 | getTypeSizeInBits(E->getType()) : OpRes; |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3798 | } |
| 3799 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3800 | if (const SCEVAddExpr *A = dyn_cast<SCEVAddExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3801 | // The result is the min of all operands results. |
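    // (For example, 4 + 8 == 12 has min(2, 3) == 2 trailing zero bits.)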
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3802 | uint32_t MinOpRes = GetMinTrailingZeros(A->getOperand(0)); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3803 | for (unsigned i = 1, e = A->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3804 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(A->getOperand(i))); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3805 | return MinOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 3806 | } |
| 3807 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3808 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3809 | // The result is the sum of all operands results. |
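    // (For example, 4 * 8 == 32 has 2 + 3 == 5 trailing zero bits, capped at
    // the bit width.)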
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3810 | uint32_t SumOpRes = GetMinTrailingZeros(M->getOperand(0)); |
| 3811 | uint32_t BitWidth = getTypeSizeInBits(M->getType()); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3812 | for (unsigned i = 1, e = M->getNumOperands(); |
| 3813 | SumOpRes != BitWidth && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3814 | SumOpRes = std::min(SumOpRes + GetMinTrailingZeros(M->getOperand(i)), |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3815 | BitWidth); |
| 3816 | return SumOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 3817 | } |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3818 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3819 | if (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3820 | // The result is the min of all operands results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3821 | uint32_t MinOpRes = GetMinTrailingZeros(A->getOperand(0)); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3822 | for (unsigned i = 1, e = A->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3823 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(A->getOperand(i))); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3824 | return MinOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 3825 | } |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3826 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3827 | if (const SCEVSMaxExpr *M = dyn_cast<SCEVSMaxExpr>(S)) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3828 | // The result is the min of all operands results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3829 | uint32_t MinOpRes = GetMinTrailingZeros(M->getOperand(0)); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3830 | for (unsigned i = 1, e = M->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3831 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(M->getOperand(i))); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3832 | return MinOpRes; |
| 3833 | } |
| 3834 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3835 | if (const SCEVUMaxExpr *M = dyn_cast<SCEVUMaxExpr>(S)) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3836 | // The result is the min of all operands results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3837 | uint32_t MinOpRes = GetMinTrailingZeros(M->getOperand(0)); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3838 | for (unsigned i = 1, e = M->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3839 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(M->getOperand(i))); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3840 | return MinOpRes; |
| 3841 | } |
| 3842 | |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3843 | if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) { |
| 3844 | // For a SCEVUnknown, ask ValueTracking. |
| 3845 | unsigned BitWidth = getTypeSizeInBits(U->getType()); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3846 | APInt Zeros(BitWidth, 0), Ones(BitWidth, 0); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3847 | computeKnownBits(U->getValue(), Zeros, Ones, F.getParent()->getDataLayout(), |
| 3848 | 0, &AC, nullptr, &DT); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3849 | return Zeros.countTrailingOnes(); |
| 3850 | } |
| 3851 | |
| 3852 | // SCEVUDivExpr |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 3853 | return 0; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 3854 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3855 | |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 3856 | /// GetRangeFromMetadata - Helper function to extract the constant range
 | 3857 | /// implied by V's !range metadata, if any such metadata is present in the IR.
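/// For example, !range !{i32 0, i32 10, i32 20, i32 30} describes values in
/// the union of the half-open ranges [0, 10) and [20, 30).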
| 3858 | static Optional<ConstantRange> GetRangeFromMetadata(Value *V) { |
| 3859 | if (Instruction *I = dyn_cast<Instruction>(V)) { |
Duncan P. N. Exon Smith | de36e80 | 2014-11-11 21:30:22 +0000 | [diff] [blame] | 3860 | if (MDNode *MD = I->getMetadata(LLVMContext::MD_range)) { |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 3861 | ConstantRange TotalRange( |
| 3862 | cast<IntegerType>(I->getType())->getBitWidth(), false); |
| 3863 | |
| 3864 | unsigned NumRanges = MD->getNumOperands() / 2; |
| 3865 | assert(NumRanges >= 1); |
| 3866 | |
| 3867 | for (unsigned i = 0; i < NumRanges; ++i) { |
Duncan P. N. Exon Smith | 5bf8fef | 2014-12-09 18:38:53 +0000 | [diff] [blame] | 3868 | ConstantInt *Lower = |
| 3869 | mdconst::extract<ConstantInt>(MD->getOperand(2 * i + 0)); |
| 3870 | ConstantInt *Upper = |
| 3871 | mdconst::extract<ConstantInt>(MD->getOperand(2 * i + 1)); |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 3872 | ConstantRange Range(Lower->getValue(), Upper->getValue()); |
| 3873 | TotalRange = TotalRange.unionWith(Range); |
| 3874 | } |
| 3875 | |
| 3876 | return TotalRange; |
| 3877 | } |
| 3878 | } |
| 3879 | |
| 3880 | return None; |
| 3881 | } |
| 3882 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3883 | /// getRange - Determine the range for a particular SCEV. If SignHint is |
| 3884 | /// HINT_RANGE_UNSIGNED (resp. HINT_RANGE_SIGNED) then getRange prefers ranges |
| 3885 | /// with a "cleaner" unsigned (resp. signed) representation. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3886 | /// |
| 3887 | ConstantRange |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3888 | ScalarEvolution::getRange(const SCEV *S, |
| 3889 | ScalarEvolution::RangeSignHint SignHint) { |
| 3890 | DenseMap<const SCEV *, ConstantRange> &Cache = |
| 3891 | SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED ? UnsignedRanges |
| 3892 | : SignedRanges; |
| 3893 | |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 3894 | // See if we've computed this range already. |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3895 | DenseMap<const SCEV *, ConstantRange>::iterator I = Cache.find(S); |
| 3896 | if (I != Cache.end()) |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 3897 | return I->second; |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3898 | |
| 3899 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3900 | return setRange(C, SignHint, ConstantRange(C->getValue()->getValue())); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 3901 | |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 3902 | unsigned BitWidth = getTypeSizeInBits(S->getType()); |
| 3903 | ConstantRange ConservativeResult(BitWidth, /*isFullSet=*/true); |
| 3904 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3905 |   // If the value has known trailing zeros, the maximum value of its range will
 | 3906 |   // have those trailing zeros as well.
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 3907 | uint32_t TZ = GetMinTrailingZeros(S); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3908 | if (TZ != 0) { |
| 3909 | if (SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED) |
| 3910 | ConservativeResult = |
| 3911 | ConstantRange(APInt::getMinValue(BitWidth), |
| 3912 | APInt::getMaxValue(BitWidth).lshr(TZ).shl(TZ) + 1); |
| 3913 | else |
| 3914 | ConservativeResult = ConstantRange( |
| 3915 | APInt::getSignedMinValue(BitWidth), |
| 3916 | APInt::getSignedMaxValue(BitWidth).ashr(TZ).shl(TZ) + 1); |
| 3917 | } |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 3918 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3919 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3920 | ConstantRange X = getRange(Add->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3921 | for (unsigned i = 1, e = Add->getNumOperands(); i != e; ++i) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3922 | X = X.add(getRange(Add->getOperand(i), SignHint)); |
| 3923 | return setRange(Add, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3924 | } |
| 3925 | |
| 3926 | if (const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3927 | ConstantRange X = getRange(Mul->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3928 | for (unsigned i = 1, e = Mul->getNumOperands(); i != e; ++i) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3929 | X = X.multiply(getRange(Mul->getOperand(i), SignHint)); |
| 3930 | return setRange(Mul, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3931 | } |
| 3932 | |
| 3933 | if (const SCEVSMaxExpr *SMax = dyn_cast<SCEVSMaxExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3934 | ConstantRange X = getRange(SMax->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3935 | for (unsigned i = 1, e = SMax->getNumOperands(); i != e; ++i) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3936 | X = X.smax(getRange(SMax->getOperand(i), SignHint)); |
| 3937 | return setRange(SMax, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3938 | } |
| 3939 | |
| 3940 | if (const SCEVUMaxExpr *UMax = dyn_cast<SCEVUMaxExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3941 | ConstantRange X = getRange(UMax->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3942 | for (unsigned i = 1, e = UMax->getNumOperands(); i != e; ++i) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3943 | X = X.umax(getRange(UMax->getOperand(i), SignHint)); |
| 3944 | return setRange(UMax, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3945 | } |
| 3946 | |
| 3947 | if (const SCEVUDivExpr *UDiv = dyn_cast<SCEVUDivExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3948 | ConstantRange X = getRange(UDiv->getLHS(), SignHint); |
| 3949 | ConstantRange Y = getRange(UDiv->getRHS(), SignHint); |
| 3950 | return setRange(UDiv, SignHint, |
| 3951 | ConservativeResult.intersectWith(X.udiv(Y))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3952 | } |
| 3953 | |
| 3954 | if (const SCEVZeroExtendExpr *ZExt = dyn_cast<SCEVZeroExtendExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3955 | ConstantRange X = getRange(ZExt->getOperand(), SignHint); |
| 3956 | return setRange(ZExt, SignHint, |
| 3957 | ConservativeResult.intersectWith(X.zeroExtend(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3958 | } |
| 3959 | |
| 3960 | if (const SCEVSignExtendExpr *SExt = dyn_cast<SCEVSignExtendExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3961 | ConstantRange X = getRange(SExt->getOperand(), SignHint); |
| 3962 | return setRange(SExt, SignHint, |
| 3963 | ConservativeResult.intersectWith(X.signExtend(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3964 | } |
| 3965 | |
| 3966 | if (const SCEVTruncateExpr *Trunc = dyn_cast<SCEVTruncateExpr>(S)) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 3967 | ConstantRange X = getRange(Trunc->getOperand(), SignHint); |
| 3968 | return setRange(Trunc, SignHint, |
| 3969 | ConservativeResult.intersectWith(X.truncate(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3970 | } |
| 3971 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3972 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(S)) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3973 | // If there's no unsigned wrap, the value will never be less than its |
| 3974 | // initial value. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3975 | if (AddRec->getNoWrapFlags(SCEV::FlagNUW)) |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3976 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(AddRec->getStart())) |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 3977 | if (!C->getValue()->isZero()) |
Dan Gohman | ae4a414 | 2010-04-11 22:12:18 +0000 | [diff] [blame] | 3978 | ConservativeResult = |
Dan Gohman | 9396b42 | 2010-06-30 06:58:35 +0000 | [diff] [blame] | 3979 | ConservativeResult.intersectWith( |
| 3980 | ConstantRange(C->getValue()->getValue(), APInt(BitWidth, 0))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 3981 | |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3982 | // If there's no signed wrap, and all the operands have the same sign or |
| 3983 | // zero, the value won't ever change sign. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3984 | if (AddRec->getNoWrapFlags(SCEV::FlagNSW)) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3985 | bool AllNonNeg = true; |
| 3986 | bool AllNonPos = true; |
| 3987 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) { |
| 3988 | if (!isKnownNonNegative(AddRec->getOperand(i))) AllNonNeg = false; |
| 3989 | if (!isKnownNonPositive(AddRec->getOperand(i))) AllNonPos = false; |
| 3990 | } |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3991 | if (AllNonNeg) |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 3992 | ConservativeResult = ConservativeResult.intersectWith( |
| 3993 | ConstantRange(APInt(BitWidth, 0), |
| 3994 | APInt::getSignedMinValue(BitWidth))); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3995 | else if (AllNonPos) |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 3996 | ConservativeResult = ConservativeResult.intersectWith( |
| 3997 | ConstantRange(APInt::getSignedMinValue(BitWidth), |
| 3998 | APInt(BitWidth, 1))); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3999 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 4000 | |
| 4001 | // TODO: non-affine addrec |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 4002 | if (AddRec->isAffine()) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 4003 | Type *Ty = AddRec->getType(); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 4004 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(AddRec->getLoop()); |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 4005 | if (!isa<SCEVCouldNotCompute>(MaxBECount) && |
| 4006 | getTypeSizeInBits(MaxBECount->getType()) <= BitWidth) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4007 | |
| 4008 | // Check for overflow. This must be done with ConstantRange arithmetic |
| 4009 | // because we could be called from within the ScalarEvolution overflow |
| 4010 | // checking code. |
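        // The idea: redo the computation of Start + MaxBECount * Step in a
        // width of 2*BitWidth+1 bits, where it cannot wrap; if the widened
        // result agrees with the result obtained in the original width, the
        // recurrence did not overflow there.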
| 4011 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 4012 | MaxBECount = getNoopOrZeroExtend(MaxBECount, Ty); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4013 | ConstantRange MaxBECountRange = getUnsignedRange(MaxBECount); |
| 4014 | ConstantRange ZExtMaxBECountRange = |
| 4015 | MaxBECountRange.zextOrTrunc(BitWidth * 2 + 1); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 4016 | |
| 4017 | const SCEV *Start = AddRec->getStart(); |
Dan Gohman | f76210e | 2010-04-12 07:39:33 +0000 | [diff] [blame] | 4018 | const SCEV *Step = AddRec->getStepRecurrence(*this); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4019 | ConstantRange StepSRange = getSignedRange(Step); |
| 4020 | ConstantRange SExtStepSRange = StepSRange.sextOrTrunc(BitWidth * 2 + 1); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 4021 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4022 | ConstantRange StartURange = getUnsignedRange(Start); |
| 4023 | ConstantRange EndURange = |
| 4024 | StartURange.add(MaxBECountRange.multiply(StepSRange)); |
Dan Gohman | f76210e | 2010-04-12 07:39:33 +0000 | [diff] [blame] | 4025 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4026 | // Check for unsigned overflow. |
| 4027 | ConstantRange ZExtStartURange = |
| 4028 | StartURange.zextOrTrunc(BitWidth * 2 + 1); |
| 4029 | ConstantRange ZExtEndURange = EndURange.zextOrTrunc(BitWidth * 2 + 1); |
| 4030 | if (ZExtStartURange.add(ZExtMaxBECountRange.multiply(SExtStepSRange)) == |
| 4031 | ZExtEndURange) { |
| 4032 | APInt Min = APIntOps::umin(StartURange.getUnsignedMin(), |
| 4033 | EndURange.getUnsignedMin()); |
| 4034 | APInt Max = APIntOps::umax(StartURange.getUnsignedMax(), |
| 4035 | EndURange.getUnsignedMax()); |
| 4036 | bool IsFullRange = Min.isMinValue() && Max.isMaxValue(); |
| 4037 | if (!IsFullRange) |
| 4038 | ConservativeResult = |
| 4039 | ConservativeResult.intersectWith(ConstantRange(Min, Max + 1)); |
| 4040 | } |
Dan Gohman | f76210e | 2010-04-12 07:39:33 +0000 | [diff] [blame] | 4041 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4042 | ConstantRange StartSRange = getSignedRange(Start); |
| 4043 | ConstantRange EndSRange = |
| 4044 | StartSRange.add(MaxBECountRange.multiply(StepSRange)); |
| 4045 | |
| 4046 | // Check for signed overflow. This must be done with ConstantRange |
| 4047 | // arithmetic because we could be called from within the ScalarEvolution |
| 4048 | // overflow checking code. |
| 4049 | ConstantRange SExtStartSRange = |
| 4050 | StartSRange.sextOrTrunc(BitWidth * 2 + 1); |
| 4051 | ConstantRange SExtEndSRange = EndSRange.sextOrTrunc(BitWidth * 2 + 1); |
| 4052 | if (SExtStartSRange.add(ZExtMaxBECountRange.multiply(SExtStepSRange)) == |
| 4053 | SExtEndSRange) { |
| 4054 | APInt Min = APIntOps::smin(StartSRange.getSignedMin(), |
| 4055 | EndSRange.getSignedMin()); |
| 4056 | APInt Max = APIntOps::smax(StartSRange.getSignedMax(), |
| 4057 | EndSRange.getSignedMax()); |
| 4058 | bool IsFullRange = Min.isMinSignedValue() && Max.isMaxSignedValue(); |
| 4059 | if (!IsFullRange) |
| 4060 | ConservativeResult = |
| 4061 | ConservativeResult.intersectWith(ConstantRange(Min, Max + 1)); |
| 4062 | } |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 4063 | } |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 4064 | } |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 4065 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4066 | return setRange(AddRec, SignHint, ConservativeResult); |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 4067 | } |
| 4068 | |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 4069 | if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) { |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 4070 | // Check if the IR explicitly contains !range metadata. |
| 4071 | Optional<ConstantRange> MDRange = GetRangeFromMetadata(U->getValue()); |
| 4072 | if (MDRange.hasValue()) |
| 4073 | ConservativeResult = ConservativeResult.intersectWith(MDRange.getValue()); |
| 4074 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4075 | // Split here to avoid paying the compile-time cost of calling both |
| 4076 | // computeKnownBits and ComputeNumSignBits. This restriction can be lifted |
| 4077 | // if needed. |
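// For example, if computeKnownBits shows that the high 24 bits of an i32
// value are zero (and no bits are known to be one), the unsigned range
// below becomes [0, 256); if ComputeNumSignBits shows 25 sign bits, the
// signed range becomes [-128, 128).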
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4078 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4079 | if (SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED) { |
| 4080 | // For a SCEVUnknown, ask ValueTracking. |
| 4081 | APInt Zeros(BitWidth, 0), Ones(BitWidth, 0); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4082 | computeKnownBits(U->getValue(), Zeros, Ones, DL, 0, &AC, nullptr, &DT); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4083 | if (Ones != ~Zeros + 1) |
| 4084 | ConservativeResult = |
| 4085 | ConservativeResult.intersectWith(ConstantRange(Ones, ~Zeros + 1)); |
| 4086 | } else { |
| 4087 | assert(SignHint == ScalarEvolution::HINT_RANGE_SIGNED && |
| 4088 | "generalize as needed!"); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4089 | unsigned NS = ComputeNumSignBits(U->getValue(), DL, 0, &AC, nullptr, &DT); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 4090 | if (NS > 1) |
| 4091 | ConservativeResult = ConservativeResult.intersectWith( |
| 4092 | ConstantRange(APInt::getSignedMinValue(BitWidth).ashr(NS - 1), |
| 4093 | APInt::getSignedMaxValue(BitWidth).ashr(NS - 1) + 1)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4094 | } |
| 4095 | |
| 4096 | return setRange(U, SignHint, ConservativeResult); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 4097 | } |
| 4098 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 4099 | return setRange(S, SignHint, ConservativeResult); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 4100 | } |
| 4101 | |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4102 | SCEV::NoWrapFlags ScalarEvolution::getNoWrapFlagsFromUB(const Value *V) { |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4103 | if (isa<ConstantExpr>(V)) return SCEV::FlagAnyWrap; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4104 | const BinaryOperator *BinOp = cast<BinaryOperator>(V); |
| 4105 | |
| 4106 | // Return early if there are no flags to propagate to the SCEV. |
| 4107 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
| 4108 | if (BinOp->hasNoUnsignedWrap()) |
| 4109 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNUW); |
| 4110 | if (BinOp->hasNoSignedWrap()) |
| 4111 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNSW); |
| 4112 | if (Flags == SCEV::FlagAnyWrap) { |
| 4113 | return SCEV::FlagAnyWrap; |
| 4114 | } |
| 4115 | |
| 4116 | // Here we check that BinOp is in the header of the innermost loop |
| 4117 | // containing BinOp, since we only deal with instructions in the loop |
| 4118 | // header. The actual loop we need to check later will come from an add |
| 4119 | // recurrence, but getting that requires computing the SCEV of the operands, |
| 4120 | // which can be expensive. This check we can do cheaply to rule out some |
| 4121 | // cases early. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4122 | Loop *innermostContainingLoop = LI.getLoopFor(BinOp->getParent()); |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4123 | if (innermostContainingLoop == nullptr || |
| 4124 | innermostContainingLoop->getHeader() != BinOp->getParent()) |
| 4125 | return SCEV::FlagAnyWrap; |
| 4126 | |
| 4127 | // Only proceed if we can prove that BinOp does not yield poison. |
| 4128 | if (!isKnownNotFullPoison(BinOp)) return SCEV::FlagAnyWrap; |
| 4129 | |
| 4130 | // At this point we know that if V is executed, then it does not wrap |
| 4131 | // according to at least one of NSW or NUW. If V is not executed, then we do |
| 4132 | // not know if the calculation that V represents would wrap. Multiple |
| 4133 | // instructions can map to the same SCEV. If we apply NSW or NUW from V to |
| 4134 | // the SCEV, we must guarantee no wrapping for that SCEV also when it is |
| 4135 | // derived from other instructions that map to the same SCEV. We cannot make |
| 4136 | // that guarantee for cases where V is not executed. So we need to find the |
| 4137 | // loop that V is considered in relation to and prove that V is executed for |
| 4138 | // every iteration of that loop. That implies that the value that V |
| 4139 | // calculates does not wrap anywhere in the loop, so then we can apply the |
| 4140 | // flags to the SCEV. |
| 4141 | // |
| 4142 | // We check isLoopInvariant to disambiguate in case we are adding two |
| 4143 | // recurrences from different loops, so that we know which loop to prove |
| 4144 | // that V is executed in. |
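// For example, given a hypothetical 'add nsw %iv, %inv' in the header of
// loop L, where %iv's SCEV is an add recurrence for L, %inv is invariant
// in L, and the instruction executes on every iteration of L, the nsw
// flag can be transferred to the SCEV built for the addition.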
| 4145 | for (int OpIndex = 0; OpIndex < 2; ++OpIndex) { |
| 4146 | const SCEV *Op = getSCEV(BinOp->getOperand(OpIndex)); |
| 4147 | if (auto *AddRec = dyn_cast<SCEVAddRecExpr>(Op)) { |
| 4148 | const int OtherOpIndex = 1 - OpIndex; |
| 4149 | const SCEV *OtherOp = getSCEV(BinOp->getOperand(OtherOpIndex)); |
| 4150 | if (isLoopInvariant(OtherOp, AddRec->getLoop()) && |
| 4151 | isGuaranteedToExecuteForEveryIteration(BinOp, AddRec->getLoop())) |
| 4152 | return Flags; |
| 4153 | } |
| 4154 | } |
| 4155 | return SCEV::FlagAnyWrap; |
| 4156 | } |
| 4157 | |
| 4158 | /// createSCEV - We know that there is no SCEV for the specified value. Analyze |
| 4159 | /// the expression. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4160 | /// |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 4161 | const SCEV *ScalarEvolution::createSCEV(Value *V) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 4162 | if (!isSCEVable(V->getType())) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4163 | return getUnknown(V); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 4164 | |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4165 | unsigned Opcode = Instruction::UserOp1; |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 4166 | if (Instruction *I = dyn_cast<Instruction>(V)) { |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4167 | Opcode = I->getOpcode(); |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 4168 | |
| 4169 | // Don't attempt to analyze instructions in blocks that aren't |
| 4170 | // reachable. Such instructions don't matter, and they aren't required |
| 4171 | // to obey basic rules for definitions dominating uses which this |
| 4172 | // analysis depends on. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4173 | if (!DT.isReachableFromEntry(I->getParent())) |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 4174 | return getUnknown(V); |
| 4175 | } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4176 | Opcode = CE->getOpcode(); |
Dan Gohman | f436bac | 2009-06-24 00:54:57 +0000 | [diff] [blame] | 4177 | else if (ConstantInt *CI = dyn_cast<ConstantInt>(V)) |
| 4178 | return getConstant(CI); |
| 4179 | else if (isa<ConstantPointerNull>(V)) |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 4180 | return getConstant(V->getType(), 0); |
Dan Gohman | f161e06e | 2009-08-25 17:49:57 +0000 | [diff] [blame] | 4181 | else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) |
| 4182 | return GA->mayBeOverridden() ? getUnknown(V) : getSCEV(GA->getAliasee()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4183 | else |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4184 | return getUnknown(V); |
Chris Lattner | a3e0bb4 | 2007-04-02 05:41:38 +0000 | [diff] [blame] | 4185 | |
Dan Gohman | 80ca01c | 2009-07-17 20:47:02 +0000 | [diff] [blame] | 4186 | Operator *U = cast<Operator>(V); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4187 | switch (Opcode) { |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4188 | case Instruction::Add: { |
| 4189 | // The simple thing to do would be to just call getSCEV on both operands |
| 4190 |     // and call getAddExpr with the result. However, if we're looking at a
| 4191 | // bunch of things all added together, this can be quite inefficient, |
| 4192 | // because it leads to N-1 getAddExpr calls for N ultimate operands. |
| 4193 | // Instead, gather up all the operands and make a single getAddExpr call. |
| 4194 | // LLVM IR canonical form means we need only traverse the left operands. |
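// For example, for a chain like '((a + b) + c) + d' with no wrap flags to
// preserve, the loop below pushes the SCEVs for d, c, b and a onto AddOps
// and then issues a single getAddExpr call.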
| 4195 | SmallVector<const SCEV *, 4> AddOps; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4196 | for (Value *Op = U;; Op = U->getOperand(0)) { |
| 4197 | U = dyn_cast<Operator>(Op); |
| 4198 | unsigned Opcode = U ? U->getOpcode() : 0; |
| 4199 | if (!U || (Opcode != Instruction::Add && Opcode != Instruction::Sub)) { |
| 4200 | assert(Op != V && "V should be an add"); |
| 4201 | AddOps.push_back(getSCEV(Op)); |
Dan Gohman | 47308d5 | 2010-08-31 22:53:17 +0000 | [diff] [blame] | 4202 | break; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4203 | } |
| 4204 | |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4205 | if (auto *OpSCEV = getExistingSCEV(U)) { |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4206 | AddOps.push_back(OpSCEV); |
| 4207 | break; |
| 4208 | } |
| 4209 | |
| 4210 | // If a NUW or NSW flag can be applied to the SCEV for this |
| 4211 | // addition, then compute the SCEV for this addition by itself |
| 4212 | // with a separate call to getAddExpr. We need to do that |
| 4213 | // instead of pushing the operands of the addition onto AddOps, |
| 4214 | // since the flags are only known to apply to this particular |
| 4215 | // addition - they may not apply to other additions that can be |
| 4216 | // formed with operands from AddOps. |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4217 | const SCEV *RHS = getSCEV(U->getOperand(1)); |
| 4218 | SCEV::NoWrapFlags Flags = getNoWrapFlagsFromUB(U); |
| 4219 | if (Flags != SCEV::FlagAnyWrap) { |
| 4220 | const SCEV *LHS = getSCEV(U->getOperand(0)); |
| 4221 | if (Opcode == Instruction::Sub) |
| 4222 | AddOps.push_back(getMinusSCEV(LHS, RHS, Flags)); |
| 4223 | else |
| 4224 | AddOps.push_back(getAddExpr(LHS, RHS, Flags)); |
| 4225 | break; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4226 | } |
| 4227 | |
Dan Gohman | 47308d5 | 2010-08-31 22:53:17 +0000 | [diff] [blame] | 4228 | if (Opcode == Instruction::Sub) |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4229 | AddOps.push_back(getNegativeSCEV(RHS)); |
Dan Gohman | 47308d5 | 2010-08-31 22:53:17 +0000 | [diff] [blame] | 4230 | else |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4231 | AddOps.push_back(RHS); |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4232 | } |
Andrew Trick | d25089f | 2011-11-29 02:16:38 +0000 | [diff] [blame] | 4233 | return getAddExpr(AddOps); |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4234 | } |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 4235 | |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4236 | case Instruction::Mul: { |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4237 | SmallVector<const SCEV *, 4> MulOps; |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4238 | for (Value *Op = U;; Op = U->getOperand(0)) { |
| 4239 | U = dyn_cast<Operator>(Op); |
| 4240 | if (!U || U->getOpcode() != Instruction::Mul) { |
| 4241 | assert(Op != V && "V should be a mul"); |
| 4242 | MulOps.push_back(getSCEV(Op)); |
| 4243 | break; |
| 4244 | } |
| 4245 | |
| 4246 | if (auto *OpSCEV = getExistingSCEV(U)) { |
| 4247 | MulOps.push_back(OpSCEV); |
| 4248 | break; |
| 4249 | } |
| 4250 | |
| 4251 | SCEV::NoWrapFlags Flags = getNoWrapFlagsFromUB(U); |
| 4252 | if (Flags != SCEV::FlagAnyWrap) { |
| 4253 | MulOps.push_back(getMulExpr(getSCEV(U->getOperand(0)), |
| 4254 | getSCEV(U->getOperand(1)), Flags)); |
| 4255 | break; |
| 4256 | } |
| 4257 | |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4258 | MulOps.push_back(getSCEV(U->getOperand(1))); |
| 4259 | } |
Dan Gohman | e5fb103 | 2010-08-16 16:03:49 +0000 | [diff] [blame] | 4260 | return getMulExpr(MulOps); |
| 4261 | } |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4262 | case Instruction::UDiv: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4263 | return getUDivExpr(getSCEV(U->getOperand(0)), |
| 4264 | getSCEV(U->getOperand(1))); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4265 | case Instruction::Sub: |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4266 | return getMinusSCEV(getSCEV(U->getOperand(0)), getSCEV(U->getOperand(1)), |
| 4267 | getNoWrapFlagsFromUB(U)); |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4268 | case Instruction::And: |
| 4269 | // For an expression like x&255 that merely masks off the high bits, |
| 4270 | // use zext(trunc(x)) as the SCEV expression. |
| 4271 | if (ConstantInt *CI = dyn_cast<ConstantInt>(U->getOperand(1))) { |
Dan Gohman | df19948 | 2009-04-25 17:05:40 +0000 | [diff] [blame] | 4272 | if (CI->isNullValue()) |
| 4273 | return getSCEV(U->getOperand(1)); |
Dan Gohman | 05c1d37 | 2009-04-27 01:41:10 +0000 | [diff] [blame] | 4274 | if (CI->isAllOnesValue()) |
| 4275 | return getSCEV(U->getOperand(0)); |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4276 | const APInt &A = CI->getValue(); |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4277 | |
| 4278 | // Instcombine's ShrinkDemandedConstant may strip bits out of |
| 4279 | // constants, obscuring what would otherwise be a low-bits mask. |
Jay Foad | a0653a3 | 2014-05-14 21:14:37 +0000 | [diff] [blame] | 4280 | // Use computeKnownBits to compute what ShrinkDemandedConstant |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4281 | // knew about to reconstruct a low-bits mask value. |
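// For example, for an i32 expression 'x & 0xF0' (LZ = 24, TZ = 4), the
// expression is rebuilt below as 16 * zext(trunc(x /u 16)), where the
// truncation is to the 4-bit middle portion of the mask.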
| 4282 | unsigned LZ = A.countLeadingZeros(); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 4283 | unsigned TZ = A.countTrailingZeros(); |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4284 | unsigned BitWidth = A.getBitWidth(); |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4285 | APInt KnownZero(BitWidth, 0), KnownOne(BitWidth, 0); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 4286 | computeKnownBits(U->getOperand(0), KnownZero, KnownOne, |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4287 | F.getParent()->getDataLayout(), 0, &AC, nullptr, &DT); |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4288 | |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 4289 | APInt EffectiveMask = |
| 4290 | APInt::getLowBitsSet(BitWidth, BitWidth - LZ - TZ).shl(TZ); |
| 4291 | if ((LZ != 0 || TZ != 0) && !((~A & ~KnownZero) & EffectiveMask)) { |
| 4292 | const SCEV *MulCount = getConstant( |
| 4293 | ConstantInt::get(getContext(), APInt::getOneBitSet(BitWidth, TZ))); |
| 4294 | return getMulExpr( |
| 4295 | getZeroExtendExpr( |
| 4296 | getTruncateExpr( |
| 4297 | getUDivExactExpr(getSCEV(U->getOperand(0)), MulCount), |
| 4298 | IntegerType::get(getContext(), BitWidth - LZ - TZ)), |
| 4299 | U->getType()), |
| 4300 | MulCount); |
| 4301 | } |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4302 | } |
| 4303 | break; |
Dan Gohman | 1ee696d | 2009-06-16 19:52:01 +0000 | [diff] [blame] | 4304 | |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4305 | case Instruction::Or: |
| 4306 | // If the RHS of the Or is a constant, we may have something like: |
| 4307 | // X*4+1 which got turned into X*4|1. Handle this as an Add so loop |
| 4308 | // optimizations will transparently handle this case. |
| 4309 | // |
| 4310 | // In order for this transformation to be safe, the LHS must be of the |
| 4311 | // form X*(2^n) and the Or constant must be less than 2^n. |
| 4312 | if (ConstantInt *CI = dyn_cast<ConstantInt>(U->getOperand(1))) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 4313 | const SCEV *LHS = getSCEV(U->getOperand(0)); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4314 | const APInt &CIVal = CI->getValue(); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 4315 | if (GetMinTrailingZeros(LHS) >= |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 4316 | (CIVal.getBitWidth() - CIVal.countLeadingZeros())) { |
| 4317 | // Build a plain add SCEV. |
| 4318 | const SCEV *S = getAddExpr(LHS, getSCEV(CI)); |
| 4319 | // If the LHS of the add was an addrec and it has no-wrap flags, |
| 4320 | // transfer the no-wrap flags, since an or won't introduce a wrap. |
| 4321 | if (const SCEVAddRecExpr *NewAR = dyn_cast<SCEVAddRecExpr>(S)) { |
| 4322 | const SCEVAddRecExpr *OldAR = cast<SCEVAddRecExpr>(LHS); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 4323 | const_cast<SCEVAddRecExpr *>(NewAR)->setNoWrapFlags( |
| 4324 | OldAR->getNoWrapFlags()); |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 4325 | } |
| 4326 | return S; |
| 4327 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4328 | } |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4329 | break; |
| 4330 | case Instruction::Xor: |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4331 | if (ConstantInt *CI = dyn_cast<ConstantInt>(U->getOperand(1))) { |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 4332 | // If the RHS of the xor is a signbit, then this is just an add. |
| 4333 | // Instcombine turns add of signbit into xor as a strength reduction step. |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4334 | if (CI->getValue().isSignBit()) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4335 | return getAddExpr(getSCEV(U->getOperand(0)), |
| 4336 | getSCEV(U->getOperand(1))); |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 4337 | |
| 4338 | // If the RHS of xor is -1, then this is a not operation. |
Dan Gohman | d277a1e | 2009-05-18 16:17:44 +0000 | [diff] [blame] | 4339 | if (CI->isAllOnesValue()) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4340 | return getNotSCEV(getSCEV(U->getOperand(0))); |
Dan Gohman | 6350296e | 2009-05-18 16:29:04 +0000 | [diff] [blame] | 4341 | |
| 4342 | // Model xor(and(x, C), C) as and(~x, C), if C is a low-bits mask. |
| 4343 | // This is a variant of the check for xor with -1, and it handles |
| 4344 | // the case where instcombine has trimmed non-demanded bits out |
| 4345 | // of an xor with -1. |
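// For example, with an i32 'x', '(x & 255) ^ 255' sees zext(trunc(x)) as
// the SCEV of the 'and'; complementing the narrow operand and re-applying
// the zext yields zext(~trunc(x)).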
| 4346 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(U->getOperand(0))) |
| 4347 | if (ConstantInt *LCI = dyn_cast<ConstantInt>(BO->getOperand(1))) |
| 4348 | if (BO->getOpcode() == Instruction::And && |
| 4349 | LCI->getValue() == CI->getValue()) |
| 4350 | if (const SCEVZeroExtendExpr *Z = |
Dan Gohman | b50f5a4 | 2009-06-17 01:22:39 +0000 | [diff] [blame] | 4351 | dyn_cast<SCEVZeroExtendExpr>(getSCEV(U->getOperand(0)))) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 4352 | Type *UTy = U->getType(); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 4353 | const SCEV *Z0 = Z->getOperand(); |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 4354 | Type *Z0Ty = Z0->getType(); |
Dan Gohman | eddf771 | 2009-06-18 00:00:20 +0000 | [diff] [blame] | 4355 | unsigned Z0TySize = getTypeSizeInBits(Z0Ty); |
| 4356 | |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 4357 | // If C is a low-bits mask, the zero extend is serving to |
Dan Gohman | eddf771 | 2009-06-18 00:00:20 +0000 | [diff] [blame] | 4358 | // mask off the high bits. Complement the operand and |
| 4359 | // re-apply the zext. |
| 4360 | if (APIntOps::isMask(Z0TySize, CI->getValue())) |
| 4361 | return getZeroExtendExpr(getNotSCEV(Z0), UTy); |
| 4362 | |
| 4363 | // If C is a single bit, it may be in the sign-bit position |
| 4364 | // before the zero-extend. In this case, represent the xor |
| 4365 | // using an add, which is equivalent, and re-apply the zext. |
Jay Foad | 583abbc | 2010-12-07 08:25:19 +0000 | [diff] [blame] | 4366 | APInt Trunc = CI->getValue().trunc(Z0TySize); |
| 4367 | if (Trunc.zext(getTypeSizeInBits(UTy)) == CI->getValue() && |
Dan Gohman | eddf771 | 2009-06-18 00:00:20 +0000 | [diff] [blame] | 4368 | Trunc.isSignBit()) |
| 4369 | return getZeroExtendExpr(getAddExpr(Z0, getConstant(Trunc)), |
| 4370 | UTy); |
Dan Gohman | b50f5a4 | 2009-06-17 01:22:39 +0000 | [diff] [blame] | 4371 | } |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4372 | } |
| 4373 | break; |
| 4374 | |
| 4375 | case Instruction::Shl: |
| 4376 | // Turn shift left of a constant amount into a multiply. |
| 4377 | if (ConstantInt *SA = dyn_cast<ConstantInt>(U->getOperand(1))) { |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 4378 | uint32_t BitWidth = cast<IntegerType>(U->getType())->getBitWidth(); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 4379 | |
| 4380 | // If the shift count is not less than the bitwidth, the result of |
| 4381 | // the shift is undefined. Don't try to analyze it, because the |
| 4382 | // resolution chosen here may differ from the resolution chosen in |
| 4383 | // other parts of the compiler. |
| 4384 | if (SA->getValue().uge(BitWidth)) |
| 4385 | break; |
| 4386 | |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4387 | // It is currently not resolved how to interpret NSW for left |
| 4388 | // shift by BitWidth - 1, so we avoid applying flags in that |
| 4389 | // case. Remove this check (or this comment) once the situation |
| 4390 | // is resolved. See |
| 4391 | // http://lists.llvm.org/pipermail/llvm-dev/2015-April/084195.html |
| 4392 | // and http://reviews.llvm.org/D8890 . |
| 4393 | auto Flags = SCEV::FlagAnyWrap; |
| 4394 | if (SA->getValue().ult(BitWidth - 1)) Flags = getNoWrapFlagsFromUB(U); |
| 4395 | |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 4396 | Constant *X = ConstantInt::get(getContext(), |
Benjamin Kramer | fc3ea6f | 2013-07-11 16:05:50 +0000 | [diff] [blame] | 4397 | APInt::getOneBitSet(BitWidth, SA->getZExtValue())); |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 4398 | return getMulExpr(getSCEV(U->getOperand(0)), getSCEV(X), Flags); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4399 | } |
| 4400 | break; |
| 4401 | |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 4402 | case Instruction::LShr: |
Nick Lewycky | 5234830 | 2009-01-13 09:18:58 +0000 | [diff] [blame] | 4403 |     // Turn logical shift right of a constant amount into an unsigned divide.
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 4404 | if (ConstantInt *SA = dyn_cast<ConstantInt>(U->getOperand(1))) { |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 4405 | uint32_t BitWidth = cast<IntegerType>(U->getType())->getBitWidth(); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 4406 | |
| 4407 | // If the shift count is not less than the bitwidth, the result of |
| 4408 | // the shift is undefined. Don't try to analyze it, because the |
| 4409 | // resolution chosen here may differ from the resolution chosen in |
| 4410 | // other parts of the compiler. |
| 4411 | if (SA->getValue().uge(BitWidth)) |
| 4412 | break; |
| 4413 | |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 4414 | Constant *X = ConstantInt::get(getContext(), |
Benjamin Kramer | fc3ea6f | 2013-07-11 16:05:50 +0000 | [diff] [blame] | 4415 | APInt::getOneBitSet(BitWidth, SA->getZExtValue())); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4416 | return getUDivExpr(getSCEV(U->getOperand(0)), getSCEV(X)); |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 4417 | } |
| 4418 | break; |
| 4419 | |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4420 | case Instruction::AShr: |
| 4421 | // For a two-shift sext-inreg, use sext(trunc(x)) as the SCEV expression. |
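// For example, for i32 'x', '(x << 24) >>s 24' becomes (sext i8 (trunc x) to i32).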
| 4422 | if (ConstantInt *CI = dyn_cast<ConstantInt>(U->getOperand(1))) |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 4423 | if (Operator *L = dyn_cast<Operator>(U->getOperand(0))) |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4424 | if (L->getOpcode() == Instruction::Shl && |
| 4425 | L->getOperand(1) == U->getOperand(1)) { |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 4426 | uint64_t BitWidth = getTypeSizeInBits(U->getType()); |
| 4427 | |
| 4428 | // If the shift count is not less than the bitwidth, the result of |
| 4429 | // the shift is undefined. Don't try to analyze it, because the |
| 4430 | // resolution chosen here may differ from the resolution chosen in |
| 4431 | // other parts of the compiler. |
| 4432 | if (CI->getValue().uge(BitWidth)) |
| 4433 | break; |
| 4434 | |
Dan Gohman | df19948 | 2009-04-25 17:05:40 +0000 | [diff] [blame] | 4435 | uint64_t Amt = BitWidth - CI->getZExtValue(); |
| 4436 | if (Amt == BitWidth) |
| 4437 | return getSCEV(L->getOperand(0)); // shift by zero --> noop |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4438 | return |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4439 | getSignExtendExpr(getTruncateExpr(getSCEV(L->getOperand(0)), |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 4440 | IntegerType::get(getContext(), |
| 4441 | Amt)), |
| 4442 | U->getType()); |
Dan Gohman | 0ec0537 | 2009-04-21 02:26:00 +0000 | [diff] [blame] | 4443 | } |
| 4444 | break; |
| 4445 | |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4446 | case Instruction::Trunc: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4447 | return getTruncateExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4448 | |
| 4449 | case Instruction::ZExt: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4450 | return getZeroExtendExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4451 | |
| 4452 | case Instruction::SExt: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4453 | return getSignExtendExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4454 | |
| 4455 | case Instruction::BitCast: |
| 4456 | // BitCasts are no-op casts so we just eliminate the cast. |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 4457 | if (isSCEVable(U->getType()) && isSCEVable(U->getOperand(0)->getType())) |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4458 | return getSCEV(U->getOperand(0)); |
| 4459 | break; |
| 4460 | |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 4461 |   // It's tempting to handle inttoptr and ptrtoint as no-ops; however, this can
| 4462 | // lead to pointer expressions which cannot safely be expanded to GEPs, |
| 4463 | // because ScalarEvolution doesn't respect the GEP aliasing rules when |
| 4464 | // simplifying integer expressions. |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 4465 | |
Dan Gohman | ee750d1 | 2009-05-08 20:26:55 +0000 | [diff] [blame] | 4466 | case Instruction::GetElementPtr: |
Dan Gohman | b256ccf | 2009-12-18 02:09:29 +0000 | [diff] [blame] | 4467 | return createNodeForGEP(cast<GEPOperator>(U)); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 4468 | |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4469 | case Instruction::PHI: |
| 4470 | return createNodeForPHI(cast<PHINode>(U)); |
| 4471 | |
| 4472 | case Instruction::Select: |
| 4473 | // This could be a smax or umax that was lowered earlier. |
| 4474 | // Try to recover it. |
| 4475 | if (ICmpInst *ICI = dyn_cast<ICmpInst>(U->getOperand(0))) { |
| 4476 | Value *LHS = ICI->getOperand(0); |
| 4477 | Value *RHS = ICI->getOperand(1); |
| 4478 | switch (ICI->getPredicate()) { |
| 4479 | case ICmpInst::ICMP_SLT: |
| 4480 | case ICmpInst::ICMP_SLE: |
| 4481 | std::swap(LHS, RHS); |
| 4482 | // fall through |
| 4483 | case ICmpInst::ICMP_SGT: |
| 4484 | case ICmpInst::ICMP_SGE: |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4485 | // a >s b ? a+x : b+x -> smax(a, b)+x |
| 4486 | // a >s b ? b+x : a+x -> smin(a, b)+x |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 4487 | if (getTypeSizeInBits(LHS->getType()) <= |
| 4488 | getTypeSizeInBits(U->getType())) { |
| 4489 | const SCEV *LS = getNoopOrSignExtend(getSCEV(LHS), U->getType()); |
| 4490 | const SCEV *RS = getNoopOrSignExtend(getSCEV(RHS), U->getType()); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4491 | const SCEV *LA = getSCEV(U->getOperand(1)); |
| 4492 | const SCEV *RA = getSCEV(U->getOperand(2)); |
| 4493 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 4494 | const SCEV *RDiff = getMinusSCEV(RA, RS); |
| 4495 | if (LDiff == RDiff) |
| 4496 | return getAddExpr(getSMaxExpr(LS, RS), LDiff); |
| 4497 | LDiff = getMinusSCEV(LA, RS); |
| 4498 | RDiff = getMinusSCEV(RA, LS); |
| 4499 | if (LDiff == RDiff) |
| 4500 | return getAddExpr(getSMinExpr(LS, RS), LDiff); |
| 4501 | } |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4502 | break; |
| 4503 | case ICmpInst::ICMP_ULT: |
| 4504 | case ICmpInst::ICMP_ULE: |
| 4505 | std::swap(LHS, RHS); |
| 4506 | // fall through |
| 4507 | case ICmpInst::ICMP_UGT: |
| 4508 | case ICmpInst::ICMP_UGE: |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4509 | // a >u b ? a+x : b+x -> umax(a, b)+x |
| 4510 | // a >u b ? b+x : a+x -> umin(a, b)+x |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 4511 | if (getTypeSizeInBits(LHS->getType()) <= |
| 4512 | getTypeSizeInBits(U->getType())) { |
| 4513 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), U->getType()); |
| 4514 | const SCEV *RS = getNoopOrZeroExtend(getSCEV(RHS), U->getType()); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4515 | const SCEV *LA = getSCEV(U->getOperand(1)); |
| 4516 | const SCEV *RA = getSCEV(U->getOperand(2)); |
| 4517 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 4518 | const SCEV *RDiff = getMinusSCEV(RA, RS); |
| 4519 | if (LDiff == RDiff) |
| 4520 | return getAddExpr(getUMaxExpr(LS, RS), LDiff); |
| 4521 | LDiff = getMinusSCEV(LA, RS); |
| 4522 | RDiff = getMinusSCEV(RA, LS); |
| 4523 | if (LDiff == RDiff) |
| 4524 | return getAddExpr(getUMinExpr(LS, RS), LDiff); |
| 4525 | } |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4526 | break; |
Dan Gohman | 4d3c3cf | 2009-06-18 20:21:07 +0000 | [diff] [blame] | 4527 | case ICmpInst::ICMP_NE: |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4528 | // n != 0 ? n+x : 1+x -> umax(n, 1)+x |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 4529 | if (getTypeSizeInBits(LHS->getType()) <= |
| 4530 | getTypeSizeInBits(U->getType()) && |
| 4531 | isa<ConstantInt>(RHS) && cast<ConstantInt>(RHS)->isZero()) { |
| 4532 | const SCEV *One = getConstant(U->getType(), 1); |
| 4533 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), U->getType()); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4534 | const SCEV *LA = getSCEV(U->getOperand(1)); |
| 4535 | const SCEV *RA = getSCEV(U->getOperand(2)); |
| 4536 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 4537 | const SCEV *RDiff = getMinusSCEV(RA, One); |
| 4538 | if (LDiff == RDiff) |
Dan Gohman | cf32f2b | 2010-08-13 20:17:14 +0000 | [diff] [blame] | 4539 | return getAddExpr(getUMaxExpr(One, LS), LDiff); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4540 | } |
Dan Gohman | 4d3c3cf | 2009-06-18 20:21:07 +0000 | [diff] [blame] | 4541 | break; |
| 4542 | case ICmpInst::ICMP_EQ: |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4543 | // n == 0 ? 1+x : n+x -> umax(n, 1)+x |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 4544 | if (getTypeSizeInBits(LHS->getType()) <= |
| 4545 | getTypeSizeInBits(U->getType()) && |
| 4546 | isa<ConstantInt>(RHS) && cast<ConstantInt>(RHS)->isZero()) { |
| 4547 | const SCEV *One = getConstant(U->getType(), 1); |
| 4548 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), U->getType()); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4549 | const SCEV *LA = getSCEV(U->getOperand(1)); |
| 4550 | const SCEV *RA = getSCEV(U->getOperand(2)); |
| 4551 | const SCEV *LDiff = getMinusSCEV(LA, One); |
| 4552 | const SCEV *RDiff = getMinusSCEV(RA, LS); |
| 4553 | if (LDiff == RDiff) |
Dan Gohman | cf32f2b | 2010-08-13 20:17:14 +0000 | [diff] [blame] | 4554 | return getAddExpr(getUMaxExpr(One, LS), LDiff); |
Dan Gohman | f33bac3 | 2010-04-24 03:09:42 +0000 | [diff] [blame] | 4555 | } |
Dan Gohman | 4d3c3cf | 2009-06-18 20:21:07 +0000 | [diff] [blame] | 4556 | break; |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 4557 | default: |
| 4558 | break; |
| 4559 | } |
| 4560 | } |
| 4561 | |
| 4562 | default: // We cannot analyze this expression. |
| 4563 | break; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4564 | } |
| 4565 | |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 4566 | return getUnknown(V); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4567 | } |
| 4568 | |
| 4569 | |
| 4570 | |
| 4571 | //===----------------------------------------------------------------------===// |
| 4572 | // Iteration Count Computation Code |
| 4573 | // |
| 4574 | |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 4575 | unsigned ScalarEvolution::getSmallConstantTripCount(Loop *L) { |
| 4576 | if (BasicBlock *ExitingBB = L->getExitingBlock()) |
| 4577 | return getSmallConstantTripCount(L, ExitingBB); |
| 4578 | |
| 4579 | // No trip count information for multiple exits. |
| 4580 | return 0; |
| 4581 | } |
| 4582 | |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4583 | /// getSmallConstantTripCount - Returns the maximum trip count of this loop as a |
Andrew Trick | e81211f | 2012-01-11 06:52:55 +0000 | [diff] [blame] | 4584 | /// normal unsigned value. Returns 0 if the trip count is unknown or not |
| 4585 | /// constant. Will also return 0 if the maximum trip count is very large (>= |
| 4586 | /// 2^32). |
| 4587 | /// |
| 4588 | /// This "trip count" assumes that control exits via ExitingBlock. More |
| 4589 | /// precisely, it is the number of times that control may reach ExitingBlock |
| 4590 | /// before taking the branch. For loops with multiple exits, it may not be the |
| 4591 | /// number of times that the loop header executes because the loop may exit
| 4592 | /// prematurely via another branch. |
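/// For example, if the exit count via ExitingBlock is the constant 9, the
/// returned trip count is 10.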
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 4593 | unsigned ScalarEvolution::getSmallConstantTripCount(Loop *L, |
| 4594 | BasicBlock *ExitingBlock) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 4595 | assert(ExitingBlock && "Must pass a non-null exiting block!"); |
| 4596 | assert(L->isLoopExiting(ExitingBlock) && |
| 4597 | "Exiting block must actually branch out of the loop!"); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4598 | const SCEVConstant *ExitCount = |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 4599 | dyn_cast<SCEVConstant>(getExitCount(L, ExitingBlock)); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4600 | if (!ExitCount) |
| 4601 | return 0; |
| 4602 | |
| 4603 | ConstantInt *ExitConst = ExitCount->getValue(); |
| 4604 | |
| 4605 | // Guard against huge trip counts. |
| 4606 | if (ExitConst->getValue().getActiveBits() > 32) |
| 4607 | return 0; |
| 4608 | |
| 4609 | // In case of integer overflow, this returns 0, which is correct. |
| 4610 | return ((unsigned)ExitConst->getZExtValue()) + 1; |
| 4611 | } |
| 4612 | |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 4613 | unsigned ScalarEvolution::getSmallConstantTripMultiple(Loop *L) { |
| 4614 | if (BasicBlock *ExitingBB = L->getExitingBlock()) |
| 4615 | return getSmallConstantTripMultiple(L, ExitingBB); |
| 4616 | |
| 4617 | // No trip multiple information for multiple exits. |
| 4618 | return 0; |
| 4619 | } |
| 4620 | |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4621 | /// getSmallConstantTripMultiple - Returns the largest constant divisor of the |
| 4622 | /// trip count of this loop as a normal unsigned value, if possible. This |
| 4623 | /// means that the actual trip count is always a multiple of the returned |
| 4624 | /// value (don't forget the trip count could very well be zero as well!). |
| 4625 | /// |
| 4626 | /// Returns 1 if the trip count is unknown or not guaranteed to be the |
| 4627 | /// multiple of a constant (which is also the case if the trip count is simply |
| 4628 | /// constant; use getSmallConstantTripCount for that case). Will also return 1
| 4629 | /// if the trip count is very large (>= 2^32). |
Andrew Trick | e81211f | 2012-01-11 06:52:55 +0000 | [diff] [blame] | 4630 | /// |
| 4631 | /// As explained in the comments for getSmallConstantTripCount, this assumes |
| 4632 | /// that control exits the loop via ExitingBlock. |
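/// For example, if the exit count via ExitingBlock is (-1 + (4 * %n)), the
/// trip count is (4 * %n) and the returned multiple is 4.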
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 4633 | unsigned |
| 4634 | ScalarEvolution::getSmallConstantTripMultiple(Loop *L, |
| 4635 | BasicBlock *ExitingBlock) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 4636 | assert(ExitingBlock && "Must pass a non-null exiting block!"); |
| 4637 | assert(L->isLoopExiting(ExitingBlock) && |
| 4638 | "Exiting block must actually branch out of the loop!"); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 4639 | const SCEV *ExitCount = getExitCount(L, ExitingBlock); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4640 | if (ExitCount == getCouldNotCompute()) |
| 4641 | return 1; |
| 4642 | |
| 4643 | // Get the trip count from the BE count by adding 1. |
| 4644 | const SCEV *TCMul = getAddExpr(ExitCount, |
| 4645 | getConstant(ExitCount->getType(), 1)); |
| 4646 | // FIXME: SCEV distributes multiplication as V1*C1 + V2*C1. We could attempt |
| 4647 | // to factor simple cases. |
| 4648 | if (const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(TCMul)) |
| 4649 | TCMul = Mul->getOperand(0); |
| 4650 | |
| 4651 | const SCEVConstant *MulC = dyn_cast<SCEVConstant>(TCMul); |
| 4652 | if (!MulC) |
| 4653 | return 1; |
| 4654 | |
| 4655 | ConstantInt *Result = MulC->getValue(); |
| 4656 | |
Hal Finkel | 30bd934 | 2012-10-24 19:46:44 +0000 | [diff] [blame] | 4657 | // Guard against huge trip counts (this requires checking |
| 4658 | // for zero to handle the case where the trip count == -1 and the |
| 4659 | // addition wraps). |
| 4660 | if (!Result || Result->getValue().getActiveBits() > 32 || |
| 4661 | Result->getValue().getActiveBits() == 0) |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 4662 | return 1; |
| 4663 | |
| 4664 | return (unsigned)Result->getZExtValue(); |
| 4665 | } |
| 4666 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4667 | // getExitCount - Get the expression for the number of loop iterations for which |
Andrew Trick | ee9143a | 2013-05-31 23:34:46 +0000 | [diff] [blame] | 4668 | // this loop is guaranteed not to exit via ExitingBlock. Otherwise return |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4669 | // SCEVCouldNotCompute. |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4670 | const SCEV *ScalarEvolution::getExitCount(Loop *L, BasicBlock *ExitingBlock) { |
| 4671 | return getBackedgeTakenInfo(L).getExact(ExitingBlock, this); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4672 | } |
| 4673 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 4674 | /// getBackedgeTakenCount - If the specified loop has a predictable |
| 4675 | /// backedge-taken count, return it, otherwise return a SCEVCouldNotCompute |
| 4676 | /// object. The backedge-taken count is the number of times the loop header |
| 4677 | /// will be branched to from within the loop. This is one less than the |
| 4678 | /// trip count of the loop, since it doesn't count the first iteration, |
| 4679 | /// when the header is branched to from outside the loop. |
| 4680 | /// |
| 4681 | /// Note that it is not valid to call this method on a loop without a |
| 4682 | /// loop-invariant backedge-taken count (see |
| 4683 | /// hasLoopInvariantBackedgeTakenCount). |
| 4684 | /// |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 4685 | const SCEV *ScalarEvolution::getBackedgeTakenCount(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4686 | return getBackedgeTakenInfo(L).getExact(this); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 4687 | } |
| 4688 | |
| 4689 | /// getMaxBackedgeTakenCount - Similar to getBackedgeTakenCount, except |
| 4690 | /// return the least SCEV value that is known never to be less than the |
| 4691 | /// actual backedge taken count. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 4692 | const SCEV *ScalarEvolution::getMaxBackedgeTakenCount(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4693 | return getBackedgeTakenInfo(L).getMax(this); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 4694 | } |
| 4695 | |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4696 | /// PushLoopPHIs - Push PHI nodes in the header of the given loop |
| 4697 | /// onto the given Worklist. |
| 4698 | static void |
| 4699 | PushLoopPHIs(const Loop *L, SmallVectorImpl<Instruction *> &Worklist) { |
| 4700 | BasicBlock *Header = L->getHeader(); |
| 4701 | |
| 4702 | // Push all Loop-header PHIs onto the Worklist stack. |
| 4703 | for (BasicBlock::iterator I = Header->begin(); |
| 4704 | PHINode *PN = dyn_cast<PHINode>(I); ++I) |
| 4705 | Worklist.push_back(PN); |
| 4706 | } |
| 4707 | |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 4708 | const ScalarEvolution::BackedgeTakenInfo & |
| 4709 | ScalarEvolution::getBackedgeTakenInfo(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4710 | // Initially insert an invalid entry for this loop. If the insertion |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 4711 | // succeeds, proceed to actually compute a backedge-taken count and |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 4712 | // update the value. The temporary CouldNotCompute value tells SCEV |
| 4713 | // code elsewhere that it shouldn't attempt to request a new |
| 4714 | // backedge-taken count, which could result in infinite recursion. |
Dan Gohman | 0daf687 | 2011-05-09 18:44:09 +0000 | [diff] [blame] | 4715 | std::pair<DenseMap<const Loop *, BackedgeTakenInfo>::iterator, bool> Pair = |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4716 | BackedgeTakenCounts.insert(std::make_pair(L, BackedgeTakenInfo())); |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4717 | if (!Pair.second) |
| 4718 | return Pair.first->second; |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 4719 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4720 | // ComputeBackedgeTakenCount may allocate memory for its result. Inserting it |
| 4721 | // into the BackedgeTakenCounts map transfers ownership. Otherwise, the result |
| 4722 | // must be cleared in this scope. |
| 4723 | BackedgeTakenInfo Result = ComputeBackedgeTakenCount(L); |
| 4724 | |
| 4725 | if (Result.getExact(this) != getCouldNotCompute()) { |
| 4726 | assert(isLoopInvariant(Result.getExact(this), L) && |
| 4727 | isLoopInvariant(Result.getMax(this), L) && |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4728 | "Computed backedge-taken count isn't loop invariant for loop!"); |
| 4729 | ++NumTripCountsComputed; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4730 | } |
| 4731 | else if (Result.getMax(this) == getCouldNotCompute() && |
| 4732 | isa<PHINode>(L->getHeader()->begin())) { |
| 4733 | // Only count loops that have phi nodes as not being computable. |
| 4734 | ++NumTripCountsNotComputed; |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4735 | } |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 4736 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4737 | // Now that we know more about the trip count for this loop, forget any |
| 4738 | // existing SCEV values for PHI nodes in this loop since they are only |
| 4739 | // conservative estimates made without the benefit of trip count |
| 4740 | // information. This is similar to the code in forgetLoop, except that |
| 4741 | // it handles SCEVUnknown PHI nodes specially. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4742 | if (Result.hasAnyInfo()) { |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4743 | SmallVector<Instruction *, 16> Worklist; |
| 4744 | PushLoopPHIs(L, Worklist); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4745 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4746 | SmallPtrSet<Instruction *, 8> Visited; |
| 4747 | while (!Worklist.empty()) { |
| 4748 | Instruction *I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 4749 | if (!Visited.insert(I).second) |
| 4750 | continue; |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4751 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4752 | ValueExprMapType::iterator It = |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 4753 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4754 | if (It != ValueExprMap.end()) { |
| 4755 | const SCEV *Old = It->second; |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 4756 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4757 | // SCEVUnknown for a PHI either means that it has an unrecognized |
| 4758 |         // structure, or it's a PHI that's in the process of being computed
| 4759 | // by createNodeForPHI. In the former case, additional loop trip |
| 4760 |         // count information isn't going to change anything. In the latter
| 4761 | // case, createNodeForPHI will perform the necessary updates on its |
| 4762 | // own when it gets to that point. |
| 4763 | if (!isa<PHINode>(I) || !isa<SCEVUnknown>(Old)) { |
| 4764 | forgetMemoizedResults(Old); |
| 4765 | ValueExprMap.erase(It); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4766 | } |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4767 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 4768 | ConstantEvolutionLoopExitValue.erase(PN); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4769 | } |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 4770 | |
| 4771 | PushDefUseChildren(I, Worklist); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4772 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4773 | } |
Dan Gohman | 6acd95b | 2011-04-25 22:48:29 +0000 | [diff] [blame] | 4774 | |
| 4775 | // Re-lookup the insert position, since the call to |
| 4776 | // ComputeBackedgeTakenCount above could result in a |
| 4777 | // recusive call to getBackedgeTakenInfo (on a different |
| 4778 | // loop), which would invalidate the iterator computed |
| 4779 | // earlier. |
| 4780 | return BackedgeTakenCounts.find(L)->second = Result; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4781 | } |
| 4782 | |
Dan Gohman | 880c92a | 2009-10-31 15:04:55 +0000 | [diff] [blame] | 4783 | /// forgetLoop - This method should be called by the client when it has |
| 4784 | /// changed a loop in a way that may affect ScalarEvolution's ability to
| 4785 | /// compute a trip count, or if the loop is deleted. |
| 4786 | void ScalarEvolution::forgetLoop(const Loop *L) { |
| 4787 | // Drop any stored trip count value. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4788 | DenseMap<const Loop*, BackedgeTakenInfo>::iterator BTCPos = |
| 4789 | BackedgeTakenCounts.find(L); |
| 4790 | if (BTCPos != BackedgeTakenCounts.end()) { |
| 4791 | BTCPos->second.clear(); |
| 4792 | BackedgeTakenCounts.erase(BTCPos); |
| 4793 | } |
Dan Gohman | f150572 | 2009-05-02 17:43:35 +0000 | [diff] [blame] | 4794 | |
Dan Gohman | 880c92a | 2009-10-31 15:04:55 +0000 | [diff] [blame] | 4795 | // Drop information about expressions based on loop-header PHIs. |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 4796 | SmallVector<Instruction *, 16> Worklist; |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4797 | PushLoopPHIs(L, Worklist); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 4798 | |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4799 | SmallPtrSet<Instruction *, 8> Visited; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 4800 | while (!Worklist.empty()) { |
| 4801 | Instruction *I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 4802 | if (!Visited.insert(I).second) |
| 4803 | continue; |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4804 | |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 4805 | ValueExprMapType::iterator It = |
| 4806 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 4807 | if (It != ValueExprMap.end()) { |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 4808 | forgetMemoizedResults(It->second); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 4809 | ValueExprMap.erase(It); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 4810 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 4811 | ConstantEvolutionLoopExitValue.erase(PN); |
| 4812 | } |
| 4813 | |
| 4814 | PushDefUseChildren(I, Worklist); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 4815 | } |
Dan Gohman | dcb354b | 2010-10-29 20:16:10 +0000 | [diff] [blame] | 4816 | |
| 4817 | // Forget all contained loops too, to avoid dangling entries in the |
| 4818 | // ValuesAtScopes map. |
| 4819 | for (Loop::iterator I = L->begin(), E = L->end(); I != E; ++I) |
| 4820 | forgetLoop(*I); |
Dan Gohman | 4330034 | 2009-02-17 20:49:49 +0000 | [diff] [blame] | 4821 | } |
| 4822 | |
Eric Christopher | ef6d593 | 2010-07-29 01:25:38 +0000 | [diff] [blame] | 4823 | /// forgetValue - This method should be called by the client when it has |
| 4824 | /// changed a value in a way that may affect its value, or which may
| 4825 | /// disconnect it from a def-use chain linking it to a loop. |
| 4826 | void ScalarEvolution::forgetValue(Value *V) { |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 4827 | Instruction *I = dyn_cast<Instruction>(V); |
| 4828 | if (!I) return; |
| 4829 | |
| 4830 | // Drop information about expressions based on loop-header PHIs. |
| 4831 | SmallVector<Instruction *, 16> Worklist; |
| 4832 | Worklist.push_back(I); |
| 4833 | |
| 4834 | SmallPtrSet<Instruction *, 8> Visited; |
| 4835 | while (!Worklist.empty()) { |
| 4836 | I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 4837 | if (!Visited.insert(I).second) |
| 4838 | continue; |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 4839 | |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 4840 | ValueExprMapType::iterator It = |
| 4841 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 4842 | if (It != ValueExprMap.end()) { |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 4843 | forgetMemoizedResults(It->second); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 4844 | ValueExprMap.erase(It); |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 4845 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 4846 | ConstantEvolutionLoopExitValue.erase(PN); |
| 4847 | } |
| 4848 | |
| 4849 | PushDefUseChildren(I, Worklist); |
| 4850 | } |
| 4851 | } |
| 4852 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4853 | /// getExact - Get the exact loop backedge taken count considering all loop |
Sanjoy Das | 135e5b9 | 2015-07-21 20:59:22 +0000 | [diff] [blame] | 4854 | /// exits. A computable result can only be returned for loops with a single |
| 4855 | /// exit. Returning the minimum taken count among all exits is incorrect |
| 4856 | /// because one of the loop's exit limits may have been skipped. HowFarToZero
| 4857 | /// assumes that the limit of each loop test is never skipped. This is a valid |
| 4858 | /// assumption as long as the loop exits via that test. For precise results, it |
| 4859 | /// is the caller's responsibility to specify the relevant loop exit using |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 4860 | /// getExact(ExitingBlock, SE). |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4861 | const SCEV * |
| 4862 | ScalarEvolution::BackedgeTakenInfo::getExact(ScalarEvolution *SE) const { |
| 4863 | // If any exits were not computable, the loop is not computable. |
| 4864 | if (!ExitNotTaken.isCompleteList()) return SE->getCouldNotCompute(); |
| 4865 | |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 4866 | // We need exactly one computable exit. |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4867 | if (!ExitNotTaken.ExitingBlock) return SE->getCouldNotCompute(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4868 | assert(ExitNotTaken.ExactNotTaken && "uninitialized not-taken info"); |
| 4869 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4870 | const SCEV *BECount = nullptr; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4871 | for (const ExitNotTakenInfo *ENT = &ExitNotTaken; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4872 | ENT != nullptr; ENT = ENT->getNextExit()) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4873 | |
| 4874 | assert(ENT->ExactNotTaken != SE->getCouldNotCompute() && "bad exit SCEV"); |
| 4875 | |
| 4876 | if (!BECount) |
| 4877 | BECount = ENT->ExactNotTaken; |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 4878 | else if (BECount != ENT->ExactNotTaken) |
| 4879 | return SE->getCouldNotCompute(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4880 | } |
Andrew Trick | bbb226a | 2011-09-02 21:20:46 +0000 | [diff] [blame] | 4881 | assert(BECount && "Invalid not taken count for loop exit"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4882 | return BECount; |
| 4883 | } |
| 4884 | |
| 4885 | /// getExact - Get the exact not taken count for this loop exit. |
| 4886 | const SCEV * |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4887 | ScalarEvolution::BackedgeTakenInfo::getExact(BasicBlock *ExitingBlock, |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4888 | ScalarEvolution *SE) const { |
| 4889 | for (const ExitNotTakenInfo *ENT = &ExitNotTaken; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4890 | ENT != nullptr; ENT = ENT->getNextExit()) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4891 | |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4892 | if (ENT->ExitingBlock == ExitingBlock) |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4893 | return ENT->ExactNotTaken; |
| 4894 | } |
| 4895 | return SE->getCouldNotCompute(); |
| 4896 | } |
| 4897 | |
| 4898 | /// getMax - Get the max backedge taken count for the loop. |
| 4899 | const SCEV * |
| 4900 | ScalarEvolution::BackedgeTakenInfo::getMax(ScalarEvolution *SE) const { |
| 4901 | return Max ? Max : SE->getCouldNotCompute(); |
| 4902 | } |
| 4903 | |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 4904 | bool ScalarEvolution::BackedgeTakenInfo::hasOperand(const SCEV *S, |
| 4905 | ScalarEvolution *SE) const { |
| 4906 | if (Max && Max != SE->getCouldNotCompute() && SE->hasOperand(Max, S)) |
| 4907 | return true; |
| 4908 | |
| 4909 | if (!ExitNotTaken.ExitingBlock) |
| 4910 | return false; |
| 4911 | |
| 4912 | for (const ExitNotTakenInfo *ENT = &ExitNotTaken; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4913 | ENT != nullptr; ENT = ENT->getNextExit()) { |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 4914 | |
| 4915 | if (ENT->ExactNotTaken != SE->getCouldNotCompute() |
| 4916 | && SE->hasOperand(ENT->ExactNotTaken, S)) { |
| 4917 | return true; |
| 4918 | } |
| 4919 | } |
| 4920 | return false; |
| 4921 | } |
| 4922 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4923 | /// Allocate memory for BackedgeTakenInfo and copy the not-taken count of each |
| 4924 | /// computable exit into a persistent ExitNotTakenInfo array. |
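|      | ///
|      | /// For example, with three computable exits the first is stored inline in
|      | /// ExitNotTaken and the remaining two live in a heap-allocated array that the
|      | /// inline entry points to via setNextExit.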
| 4925 | ScalarEvolution::BackedgeTakenInfo::BackedgeTakenInfo( |
| 4926 | SmallVectorImpl< std::pair<BasicBlock *, const SCEV *> > &ExitCounts, |
| 4927 | bool Complete, const SCEV *MaxCount) : Max(MaxCount) { |
| 4928 | |
| 4929 | if (!Complete) |
| 4930 | ExitNotTaken.setIncomplete(); |
| 4931 | |
| 4932 | unsigned NumExits = ExitCounts.size(); |
| 4933 | if (NumExits == 0) return; |
| 4934 | |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4935 | ExitNotTaken.ExitingBlock = ExitCounts[0].first; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4936 | ExitNotTaken.ExactNotTaken = ExitCounts[0].second; |
| 4937 | if (NumExits == 1) return; |
| 4938 | |
| 4939 | // Handle the rare case of multiple computable exits. |
| 4940 | ExitNotTakenInfo *ENT = new ExitNotTakenInfo[NumExits-1]; |
| 4941 | |
| 4942 | ExitNotTakenInfo *PrevENT = &ExitNotTaken; |
| 4943 | for (unsigned i = 1; i < NumExits; ++i, PrevENT = ENT, ++ENT) { |
| 4944 | PrevENT->setNextExit(ENT); |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 4945 | ENT->ExitingBlock = ExitCounts[i].first; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4946 | ENT->ExactNotTaken = ExitCounts[i].second; |
| 4947 | } |
| 4948 | } |
| 4949 | |
| 4950 | /// clear - Invalidate this result and free the ExitNotTakenInfo array. |
| 4951 | void ScalarEvolution::BackedgeTakenInfo::clear() { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4952 | ExitNotTaken.ExitingBlock = nullptr; |
| 4953 | ExitNotTaken.ExactNotTaken = nullptr; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4954 | delete[] ExitNotTaken.getNextExit(); |
| 4955 | } |
| 4956 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 4957 | /// ComputeBackedgeTakenCount - Compute the number of times the backedge |
| 4958 | /// of the specified loop will execute. |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 4959 | ScalarEvolution::BackedgeTakenInfo |
| 4960 | ScalarEvolution::ComputeBackedgeTakenCount(const Loop *L) { |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 4961 | SmallVector<BasicBlock *, 8> ExitingBlocks; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 4962 | L->getExitingBlocks(ExitingBlocks); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4963 | |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4964 | SmallVector<std::pair<BasicBlock *, const SCEV *>, 4> ExitCounts; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4965 | bool CouldComputeBECount = true; |
Andrew Trick | ee5aa7f | 2014-01-15 06:42:11 +0000 | [diff] [blame] | 4966 | BasicBlock *Latch = L->getLoopLatch(); // may be NULL. |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4967 | const SCEV *MustExitMaxBECount = nullptr; |
| 4968 | const SCEV *MayExitMaxBECount = nullptr; |
| 4969 | |
| 4970 | // Compute the ExitLimit for each loop exit. Use this to populate ExitCounts |
| 4971 | // and compute maxBECount. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 4972 | for (unsigned i = 0, e = ExitingBlocks.size(); i != e; ++i) { |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4973 | BasicBlock *ExitBB = ExitingBlocks[i]; |
| 4974 | ExitLimit EL = ComputeExitLimit(L, ExitBB); |
| 4975 | |
| 4976 | // 1. For each exit that can be computed, add an entry to ExitCounts. |
| 4977 | // CouldComputeBECount is true only if all exits can be computed. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4978 | if (EL.Exact == getCouldNotCompute()) |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 4979 | // We couldn't compute an exact value for this exit, so |
Dan Gohman | 8885b37 | 2009-06-22 21:10:22 +0000 | [diff] [blame] | 4980 | // we won't be able to compute an exact value for the loop. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4981 | CouldComputeBECount = false; |
| 4982 | else |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4983 | ExitCounts.push_back(std::make_pair(ExitBB, EL.Exact)); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 4984 | |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4985 | // 2. Derive the loop's MaxBECount from each exit's max number of |
| 4986 | // non-exiting iterations. Partition the loop exits into two kinds: |
| 4987 | // LoopMustExits and LoopMayExits. |
| 4988 | // |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 4989 | // If the exit dominates the loop latch, it is a LoopMustExit; otherwise it
| 4990 | // is a LoopMayExit. If any computable LoopMustExit is found, then |
| 4991 | // MaxBECount is the minimum EL.Max of computable LoopMustExits. Otherwise, |
| 4992 | // MaxBECount is conservatively the maximum EL.Max, where CouldNotCompute is |
| 4993 | // considered greater than any computable EL.Max. |
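|      | //
|      | // For example, a bottom-tested exit in the latch block itself trivially
|      | // dominates the latch and is a LoopMustExit, whereas a 'break' buried in a
|      | // conditionally executed block of the body need not dominate the latch and
|      | // is then only a LoopMayExit.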
| 4994 | if (EL.Max != getCouldNotCompute() && Latch && |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 4995 | DT.dominates(ExitBB, Latch)) { |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 4996 | if (!MustExitMaxBECount) |
| 4997 | MustExitMaxBECount = EL.Max; |
| 4998 | else { |
| 4999 | MustExitMaxBECount = |
| 5000 | getUMinFromMismatchedTypes(MustExitMaxBECount, EL.Max); |
Andrew Trick | e255359 | 2014-05-22 00:37:03 +0000 | [diff] [blame] | 5001 | } |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 5002 | } else if (MayExitMaxBECount != getCouldNotCompute()) { |
| 5003 | if (!MayExitMaxBECount || EL.Max == getCouldNotCompute()) |
| 5004 | MayExitMaxBECount = EL.Max; |
| 5005 | else { |
| 5006 | MayExitMaxBECount = |
| 5007 | getUMaxFromMismatchedTypes(MayExitMaxBECount, EL.Max); |
| 5008 | } |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 5009 | } |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5010 | } |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 5011 | const SCEV *MaxBECount = MustExitMaxBECount ? MustExitMaxBECount : |
| 5012 | (MayExitMaxBECount ? MayExitMaxBECount : getCouldNotCompute()); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5013 | return BackedgeTakenInfo(ExitCounts, CouldComputeBECount, MaxBECount); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5014 | } |
| 5015 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5016 | /// ComputeExitLimit - Compute the number of times the backedge of the
| 5017 | /// specified loop will execute if it exits via the specified block.
| 5018 | ScalarEvolution::ExitLimit |
| 5019 | ScalarEvolution::ComputeExitLimit(const Loop *L, BasicBlock *ExitingBlock) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5020 | |
| 5021 | // Okay, we've chosen an exiting block. See what condition causes us to |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5022 | // exit at this block and remember the exit block and whether all other targets |
| 5023 | // lead to the loop header. |
| 5024 | bool MustExecuteLoopHeader = true; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5025 | BasicBlock *Exit = nullptr; |
Duncan P. N. Exon Smith | 6c99015 | 2014-07-21 17:06:51 +0000 | [diff] [blame] | 5026 | for (succ_iterator SI = succ_begin(ExitingBlock), SE = succ_end(ExitingBlock); |
| 5027 | SI != SE; ++SI) |
| 5028 | if (!L->contains(*SI)) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5029 | if (Exit) // Multiple exit successors. |
| 5030 | return getCouldNotCompute(); |
Duncan P. N. Exon Smith | 6c99015 | 2014-07-21 17:06:51 +0000 | [diff] [blame] | 5031 | Exit = *SI; |
| 5032 | } else if (*SI != L->getHeader()) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5033 | MustExecuteLoopHeader = false; |
| 5034 | } |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5035 | |
Chris Lattner | 1895485 | 2007-01-07 02:24:26 +0000 | [diff] [blame] | 5036 | // At this point, we know we have a conditional branch that determines whether |
| 5037 | // the loop is exited. However, we don't know if the branch is executed each |
| 5038 | // time through the loop. If not, then the execution count of the branch will |
| 5039 | // not be equal to the trip count of the loop. |
| 5040 | // |
| 5041 | // Currently we check for this by checking to see if the Exit branch goes to |
| 5042 | // the loop header. If so, we know it will always execute the same number of |
Chris Lattner | 5a55476 | 2007-01-14 01:24:47 +0000 | [diff] [blame] | 5043 | // times as the loop. We also handle the case where the exit block *is* the |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5044 | // loop header. This is common for un-rotated loops. |
| 5045 | // |
| 5046 | // If both of those tests fail, walk up the unique predecessor chain to the |
| 5047 | // header, stopping if there is an edge that doesn't exit the loop. If the |
| 5048 | // header is reached, the execution count of the branch will be equal to the |
| 5049 | // trip count of the loop. |
| 5050 | // |
| 5051 | // More extensive analysis could be done to handle more cases here. |
| 5052 | // |
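|      | // For example, in a rotated loop the exiting latch's only in-loop successor
|      | // is the header, so MustExecuteLoopHeader stays true; in an un-rotated loop
|      | // the header itself is the exiting block. In both cases the predecessor walk
|      | // below is skipped.
|      | //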
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5053 | if (!MustExecuteLoopHeader && ExitingBlock != L->getHeader()) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5054 | // The simple checks failed, try climbing the unique predecessor chain |
| 5055 | // up to the header. |
| 5056 | bool Ok = false; |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5057 | for (BasicBlock *BB = ExitingBlock; BB; ) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5058 | BasicBlock *Pred = BB->getUniquePredecessor(); |
| 5059 | if (!Pred) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5060 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5061 | TerminatorInst *PredTerm = Pred->getTerminator(); |
Pete Cooper | ebcd748 | 2015-08-06 20:22:46 +0000 | [diff] [blame] | 5062 | for (const BasicBlock *PredSucc : PredTerm->successors()) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5063 | if (PredSucc == BB) |
| 5064 | continue; |
| 5065 | // If the predecessor has a successor that isn't BB and isn't |
| 5066 | // outside the loop, assume the worst. |
| 5067 | if (L->contains(PredSucc)) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5068 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5069 | } |
| 5070 | if (Pred == L->getHeader()) { |
| 5071 | Ok = true; |
| 5072 | break; |
| 5073 | } |
| 5074 | BB = Pred; |
| 5075 | } |
| 5076 | if (!Ok) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5077 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5078 | } |
| 5079 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5080 | bool IsOnlyExit = (L->getExitingBlock() != nullptr); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5081 | TerminatorInst *Term = ExitingBlock->getTerminator(); |
| 5082 | if (BranchInst *BI = dyn_cast<BranchInst>(Term)) { |
| 5083 | assert(BI->isConditional() && "If unconditional, it can't be in loop!"); |
| 5084 | // Proceed to the next level to examine the exit condition expression. |
| 5085 | return ComputeExitLimitFromCond(L, BI->getCondition(), BI->getSuccessor(0), |
| 5086 | BI->getSuccessor(1), |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5087 | /*ControlsExit=*/IsOnlyExit); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5088 | } |
| 5089 | |
| 5090 | if (SwitchInst *SI = dyn_cast<SwitchInst>(Term)) |
| 5091 | return ComputeExitLimitFromSingleExitSwitch(L, SI, Exit, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5092 | /*ControlsExit=*/IsOnlyExit); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5093 | |
| 5094 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5095 | } |
| 5096 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5097 | /// ComputeExitLimitFromCond - Compute the number of times the |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5098 | /// backedge of the specified loop would execute if its exit condition
| 5099 | /// were a conditional branch of ExitCond, TBB, and FBB. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5100 | /// |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5101 | /// @param ControlsExit is true if ExitCond directly controls the exit |
| 5102 | /// branch. In this case, we can assume that the loop exits only if the |
| 5103 | /// condition is true and can infer that failing to meet the condition prior to |
| 5104 | /// integer wraparound results in undefined behavior. |
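|      | ///
|      | /// For example, for a loop guarded by 'i != n && *p != 0', each operand of
|      | /// the 'and' is analyzed separately and the exact count is the umin of the
|      | /// two operand counts when either condition may exit the loop.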
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5105 | ScalarEvolution::ExitLimit |
| 5106 | ScalarEvolution::ComputeExitLimitFromCond(const Loop *L, |
| 5107 | Value *ExitCond, |
| 5108 | BasicBlock *TBB, |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5109 | BasicBlock *FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5110 | bool ControlsExit) { |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 5111 | // Check if the controlling expression for this loop is an And or Or. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5112 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(ExitCond)) { |
| 5113 | if (BO->getOpcode() == Instruction::And) { |
| 5114 | // Recurse on the operands of the and. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5115 | bool EitherMayExit = L->contains(TBB); |
| 5116 | ExitLimit EL0 = ComputeExitLimitFromCond(L, BO->getOperand(0), TBB, FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5117 | ControlsExit && !EitherMayExit); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5118 | ExitLimit EL1 = ComputeExitLimitFromCond(L, BO->getOperand(1), TBB, FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5119 | ControlsExit && !EitherMayExit); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5120 | const SCEV *BECount = getCouldNotCompute(); |
| 5121 | const SCEV *MaxBECount = getCouldNotCompute(); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5122 | if (EitherMayExit) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5123 | // Both conditions must be true for the loop to continue executing. |
| 5124 | // Choose the less conservative count. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5125 | if (EL0.Exact == getCouldNotCompute() || |
| 5126 | EL1.Exact == getCouldNotCompute()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5127 | BECount = getCouldNotCompute(); |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 5128 | else |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5129 | BECount = getUMinFromMismatchedTypes(EL0.Exact, EL1.Exact); |
| 5130 | if (EL0.Max == getCouldNotCompute()) |
| 5131 | MaxBECount = EL1.Max; |
| 5132 | else if (EL1.Max == getCouldNotCompute()) |
| 5133 | MaxBECount = EL0.Max; |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 5134 | else |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5135 | MaxBECount = getUMinFromMismatchedTypes(EL0.Max, EL1.Max); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5136 | } else { |
Dan Gohman | f7495f2 | 2010-08-11 00:12:36 +0000 | [diff] [blame] | 5137 | // Both conditions must be true at the same time for the loop to exit. |
| 5138 | // For now, be conservative. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5139 | assert(L->contains(FBB) && "Loop block has no successor in loop!"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5140 | if (EL0.Max == EL1.Max) |
| 5141 | MaxBECount = EL0.Max; |
| 5142 | if (EL0.Exact == EL1.Exact) |
| 5143 | BECount = EL0.Exact; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5144 | } |
| 5145 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5146 | return ExitLimit(BECount, MaxBECount); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5147 | } |
| 5148 | if (BO->getOpcode() == Instruction::Or) { |
| 5149 | // Recurse on the operands of the or. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5150 | bool EitherMayExit = L->contains(FBB); |
| 5151 | ExitLimit EL0 = ComputeExitLimitFromCond(L, BO->getOperand(0), TBB, FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5152 | ControlsExit && !EitherMayExit); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5153 | ExitLimit EL1 = ComputeExitLimitFromCond(L, BO->getOperand(1), TBB, FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5154 | ControlsExit && !EitherMayExit); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5155 | const SCEV *BECount = getCouldNotCompute(); |
| 5156 | const SCEV *MaxBECount = getCouldNotCompute(); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5157 | if (EitherMayExit) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5158 | // Both conditions must be false for the loop to continue executing. |
| 5159 | // Choose the less conservative count. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5160 | if (EL0.Exact == getCouldNotCompute() || |
| 5161 | EL1.Exact == getCouldNotCompute()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5162 | BECount = getCouldNotCompute(); |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 5163 | else |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5164 | BECount = getUMinFromMismatchedTypes(EL0.Exact, EL1.Exact); |
| 5165 | if (EL0.Max == getCouldNotCompute()) |
| 5166 | MaxBECount = EL1.Max; |
| 5167 | else if (EL1.Max == getCouldNotCompute()) |
| 5168 | MaxBECount = EL0.Max; |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 5169 | else |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5170 | MaxBECount = getUMinFromMismatchedTypes(EL0.Max, EL1.Max); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5171 | } else { |
Dan Gohman | f7495f2 | 2010-08-11 00:12:36 +0000 | [diff] [blame] | 5172 | // Both conditions must be false at the same time for the loop to exit. |
| 5173 | // For now, be conservative. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5174 | assert(L->contains(TBB) && "Loop block has no successor in loop!"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5175 | if (EL0.Max == EL1.Max) |
| 5176 | MaxBECount = EL0.Max; |
| 5177 | if (EL0.Exact == EL1.Exact) |
| 5178 | BECount = EL0.Exact; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5179 | } |
| 5180 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5181 | return ExitLimit(BECount, MaxBECount); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5182 | } |
| 5183 | } |
| 5184 | |
| 5185 | // With an icmp, it may be feasible to compute an exact backedge-taken count. |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 5186 | // Proceed to the next level to examine the icmp. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5187 | if (ICmpInst *ExitCondICmp = dyn_cast<ICmpInst>(ExitCond)) |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5188 | return ComputeExitLimitFromICmp(L, ExitCondICmp, TBB, FBB, ControlsExit); |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5189 | |
Dan Gohman | 6b1e2a8 | 2010-02-19 18:12:07 +0000 | [diff] [blame] | 5190 | // Check for a constant condition. These are normally stripped out by |
| 5191 | // SimplifyCFG, but ScalarEvolution may be used by a pass which wishes to |
| 5192 | // preserve the CFG and is temporarily leaving constant conditions |
| 5193 | // in place. |
| 5194 | if (ConstantInt *CI = dyn_cast<ConstantInt>(ExitCond)) { |
| 5195 | if (L->contains(FBB) == !CI->getZExtValue()) |
| 5196 | // The backedge is always taken. |
| 5197 | return getCouldNotCompute(); |
| 5198 | else |
| 5199 | // The backedge is never taken. |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 5200 | return getConstant(CI->getType(), 0); |
Dan Gohman | 6b1e2a8 | 2010-02-19 18:12:07 +0000 | [diff] [blame] | 5201 | } |
| 5202 | |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 5203 | // If it's not an integer or pointer comparison, then compute it the hard way.
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5204 | return ComputeExitCountExhaustively(L, ExitCond, !L->contains(TBB)); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5205 | } |
| 5206 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5207 | /// ComputeExitLimitFromICmp - Compute the number of times the |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5208 | /// backedge of the specified loop would execute if its exit condition
| 5209 | /// were a conditional branch of the ICmpInst ExitCond, TBB, and FBB. |
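|      | ///
|      | /// For example, 'while (X != Y)' is rewritten as HowFarToZero(X - Y) and
|      | /// 'while (X < Y)' is handled by HowManyLessThans; the predicate is first
|      | /// inverted if the loop exits when the condition is true.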
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5210 | ScalarEvolution::ExitLimit |
| 5211 | ScalarEvolution::ComputeExitLimitFromICmp(const Loop *L, |
| 5212 | ICmpInst *ExitCond, |
| 5213 | BasicBlock *TBB, |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 5214 | BasicBlock *FBB, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5215 | bool ControlsExit) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5216 | |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5217 | // If the condition was exit on true, convert the condition to exit on false |
| 5218 | ICmpInst::Predicate Cond; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 5219 | if (!L->contains(FBB)) |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5220 | Cond = ExitCond->getPredicate(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5221 | else |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5222 | Cond = ExitCond->getInversePredicate(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5223 | |
| 5224 | // Handle common loops like: for (X = "string"; *X; ++X) |
| 5225 | if (LoadInst *LI = dyn_cast<LoadInst>(ExitCond->getOperand(0))) |
| 5226 | if (Constant *RHS = dyn_cast<Constant>(ExitCond->getOperand(1))) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5227 | ExitLimit ItCnt = |
| 5228 | ComputeLoadConstantCompareExitLimit(LI, RHS, L, Cond); |
Dan Gohman | ba82034 | 2010-02-24 17:31:30 +0000 | [diff] [blame] | 5229 | if (ItCnt.hasAnyInfo()) |
| 5230 | return ItCnt; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5231 | } |
| 5232 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5233 | const SCEV *LHS = getSCEV(ExitCond->getOperand(0)); |
| 5234 | const SCEV *RHS = getSCEV(ExitCond->getOperand(1)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5235 | |
| 5236 | // Try to evaluate any dependencies out of the loop. |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 5237 | LHS = getSCEVAtScope(LHS, L); |
| 5238 | RHS = getSCEVAtScope(RHS, L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5239 | |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5240 | // At this point, we would like to compute how many iterations of the |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5241 | // loop the predicate will return true for these inputs. |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 5242 | if (isLoopInvariant(LHS, L) && !isLoopInvariant(RHS, L)) { |
Dan Gohman | dc5f5cb | 2008-09-16 18:52:57 +0000 | [diff] [blame] | 5243 | // If there is a loop-invariant, force it into the RHS. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5244 | std::swap(LHS, RHS); |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5245 | Cond = ICmpInst::getSwappedPredicate(Cond); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5246 | } |
| 5247 | |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 5248 | // Simplify the operands before analyzing them. |
| 5249 | (void)SimplifyICmpOperands(Cond, LHS, RHS); |
| 5250 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5251 | // If we have a comparison of a chrec against a constant, try to use value |
| 5252 | // ranges to answer this query. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5253 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) |
| 5254 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(LHS)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5255 | if (AddRec->getLoop() == L) { |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 5256 | // Form the constant range. |
| 5257 | ConstantRange CompRange( |
| 5258 | ICmpInst::makeConstantRange(Cond, RHSC->getValue()->getValue())); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5259 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5260 | const SCEV *Ret = AddRec->getNumIterationsInRange(CompRange, *this); |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 5261 | if (!isa<SCEVCouldNotCompute>(Ret)) return Ret; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5262 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5263 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5264 | switch (Cond) { |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5265 | case ICmpInst::ICMP_NE: { // while (X != Y) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5266 | // Convert to: while (X-Y != 0) |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5267 | ExitLimit EL = HowFarToZero(getMinusSCEV(LHS, RHS), L, ControlsExit); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5268 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5269 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5270 | } |
Dan Gohman | 8a8ad7d | 2009-08-20 16:42:55 +0000 | [diff] [blame] | 5271 | case ICmpInst::ICMP_EQ: { // while (X == Y) |
| 5272 | // Convert to: while (X-Y == 0) |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5273 | ExitLimit EL = HowFarToNonZero(getMinusSCEV(LHS, RHS), L); |
| 5274 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5275 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5276 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 5277 | case ICmpInst::ICMP_SLT: |
| 5278 | case ICmpInst::ICMP_ULT: { // while (X < Y) |
| 5279 | bool IsSigned = Cond == ICmpInst::ICMP_SLT; |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5280 | ExitLimit EL = HowManyLessThans(LHS, RHS, L, IsSigned, ControlsExit); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5281 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 5282 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5283 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 5284 | case ICmpInst::ICMP_SGT: |
| 5285 | case ICmpInst::ICMP_UGT: { // while (X > Y) |
| 5286 | bool IsSigned = Cond == ICmpInst::ICMP_SGT; |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5287 | ExitLimit EL = HowManyGreaterThans(LHS, RHS, L, IsSigned, ControlsExit); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5288 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 5289 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5290 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5291 | default: |
Chris Lattner | 0916921 | 2004-04-02 20:26:46 +0000 | [diff] [blame] | 5292 | #if 0 |
David Greene | df1c497 | 2009-12-23 22:18:14 +0000 | [diff] [blame] | 5293 | dbgs() << "ComputeBackedgeTakenCount "; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5294 | if (ExitCond->getOperand(0)->getType()->isUnsigned()) |
David Greene | df1c497 | 2009-12-23 22:18:14 +0000 | [diff] [blame] | 5295 | dbgs() << "[unsigned] "; |
| 5296 | dbgs() << *LHS << " " |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5297 | << Instruction::getOpcodeName(Instruction::ICmp) |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5298 | << " " << *RHS << "\n"; |
Chris Lattner | 0916921 | 2004-04-02 20:26:46 +0000 | [diff] [blame] | 5299 | #endif |
Chris Lattner | 0defaa1 | 2004-04-03 00:43:03 +0000 | [diff] [blame] | 5300 | break; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5301 | } |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5302 | return ComputeExitCountExhaustively(L, ExitCond, !L->contains(TBB)); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5303 | } |
| 5304 | |
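|      | /// ComputeExitLimitFromSingleExitSwitch - Compute the exit limit for a loop
|      | /// that leaves through a single switch case. The exit is modeled as
|      | /// 'while (Cond != CaseVal)' and answered by HowFarToZero on their difference;
|      | /// the switch's default destination must remain inside the loop.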
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5305 | ScalarEvolution::ExitLimit |
| 5306 | ScalarEvolution::ComputeExitLimitFromSingleExitSwitch(const Loop *L, |
| 5307 | SwitchInst *Switch, |
| 5308 | BasicBlock *ExitingBlock, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5309 | bool ControlsExit) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5310 | assert(!L->contains(ExitingBlock) && "Not an exiting block!"); |
| 5311 | |
| 5312 | // Give up if the exit is the default dest of a switch. |
| 5313 | if (Switch->getDefaultDest() == ExitingBlock) |
| 5314 | return getCouldNotCompute(); |
| 5315 | |
| 5316 | assert(L->contains(Switch->getDefaultDest()) && |
| 5317 | "Default case must not exit the loop!"); |
| 5318 | const SCEV *LHS = getSCEVAtScope(Switch->getCondition(), L); |
| 5319 | const SCEV *RHS = getConstant(Switch->findCaseDest(ExitingBlock)); |
| 5320 | |
| 5321 | // while (X != Y) --> while (X-Y != 0) |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 5322 | ExitLimit EL = HowFarToZero(getMinusSCEV(LHS, RHS), L, ControlsExit); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 5323 | if (EL.hasAnyInfo()) |
| 5324 | return EL; |
| 5325 | |
| 5326 | return getCouldNotCompute(); |
| 5327 | } |
| 5328 | |
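|      | /// EvaluateConstantChrecAtConstant - Fold a constant add recurrence at a
|      | /// constant iteration number; e.g. an affine recurrence {C1,+,C2} evaluated
|      | /// at iteration K yields C1 + K*C2.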
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5329 | static ConstantInt * |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 5330 | EvaluateConstantChrecAtConstant(const SCEVAddRecExpr *AddRec, ConstantInt *C, |
| 5331 | ScalarEvolution &SE) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5332 | const SCEV *InVal = SE.getConstant(C); |
| 5333 | const SCEV *Val = AddRec->evaluateAtIteration(InVal, SE); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5334 | assert(isa<SCEVConstant>(Val) && |
| 5335 | "Evaluation of SCEV at constant didn't fold correctly?"); |
| 5336 | return cast<SCEVConstant>(Val)->getValue(); |
| 5337 | } |
| 5338 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5339 | /// ComputeLoadConstantCompareExitLimit - Given an exit condition of |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5340 | /// 'icmp op load X, cst', try to see if we can compute the backedge |
| 5341 | /// execution count. |
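|      | ///
|      | /// This catches loops such as 'for (i = 0; Table[i] != 0; ++i)' where Table
|      | /// is a constant global array: the load is folded through the GEP indices for
|      | /// each candidate iteration (up to MaxBruteForceIterations) until the compare
|      | /// first evaluates to false, which yields the backedge-taken count.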
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5342 | ScalarEvolution::ExitLimit |
| 5343 | ScalarEvolution::ComputeLoadConstantCompareExitLimit( |
| 5344 | LoadInst *LI, |
| 5345 | Constant *RHS, |
| 5346 | const Loop *L, |
| 5347 | ICmpInst::Predicate predicate) { |
| 5348 | |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5349 | if (LI->isVolatile()) return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5350 | |
| 5351 | // Check to see if the loaded pointer is a getelementptr of a global. |
Dan Gohman | ba82034 | 2010-02-24 17:31:30 +0000 | [diff] [blame] | 5352 | // TODO: Use SCEV instead of manually grubbing with GEPs. |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5353 | GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(LI->getOperand(0)); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5354 | if (!GEP) return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5355 | |
| 5356 | // Make sure that it is really a constant global we are gepping, with an |
| 5357 | // initializer, and make sure the first IDX is really 0. |
| 5358 | GlobalVariable *GV = dyn_cast<GlobalVariable>(GEP->getOperand(0)); |
Dan Gohman | 5d5bc6d | 2009-08-19 18:20:44 +0000 | [diff] [blame] | 5359 | if (!GV || !GV->isConstant() || !GV->hasDefinitiveInitializer() || |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5360 | GEP->getNumOperands() < 3 || !isa<Constant>(GEP->getOperand(1)) || |
| 5361 | !cast<Constant>(GEP->getOperand(1))->isNullValue()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5362 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5363 | |
| 5364 | // Okay, we allow one non-constant index into the GEP instruction. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5365 | Value *VarIdx = nullptr; |
Chris Lattner | e166a85 | 2012-01-24 05:49:24 +0000 | [diff] [blame] | 5366 | std::vector<Constant*> Indexes; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5367 | unsigned VarIdxNum = 0; |
| 5368 | for (unsigned i = 2, e = GEP->getNumOperands(); i != e; ++i) |
| 5369 | if (ConstantInt *CI = dyn_cast<ConstantInt>(GEP->getOperand(i))) { |
| 5370 | Indexes.push_back(CI); |
| 5371 | } else if (!isa<ConstantInt>(GEP->getOperand(i))) { |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5372 | if (VarIdx) return getCouldNotCompute(); // Multiple non-constant idx's. |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5373 | VarIdx = GEP->getOperand(i); |
| 5374 | VarIdxNum = i-2; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5375 | Indexes.push_back(nullptr); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5376 | } |
| 5377 | |
Andrew Trick | 7004e4b | 2012-03-26 22:33:59 +0000 | [diff] [blame] | 5378 | // Loop-invariant loads may be a byproduct of loop optimization. Skip them. |
| 5379 | if (!VarIdx) |
| 5380 | return getCouldNotCompute(); |
| 5381 | |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5382 | // Okay, we know we have a (load (gep GV, 0, X)) comparison with a constant. |
| 5383 | // Check to see if X is a loop variant variable value now. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5384 | const SCEV *Idx = getSCEV(VarIdx); |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 5385 | Idx = getSCEVAtScope(Idx, L); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5386 | |
| 5387 | // We can only recognize very limited forms of loop index expressions, in |
| 5388 | // particular, only affine AddRec's like {C1,+,C2}. |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 5389 | const SCEVAddRecExpr *IdxExpr = dyn_cast<SCEVAddRecExpr>(Idx); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 5390 | if (!IdxExpr || !IdxExpr->isAffine() || isLoopInvariant(IdxExpr, L) || |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5391 | !isa<SCEVConstant>(IdxExpr->getOperand(0)) || |
| 5392 | !isa<SCEVConstant>(IdxExpr->getOperand(1))) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5393 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5394 | |
| 5395 | unsigned MaxSteps = MaxBruteForceIterations; |
| 5396 | for (unsigned IterationNum = 0; IterationNum != MaxSteps; ++IterationNum) { |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 5397 | ConstantInt *ItCst = ConstantInt::get( |
Owen Anderson | b6b2530 | 2009-07-14 23:09:55 +0000 | [diff] [blame] | 5398 | cast<IntegerType>(IdxExpr->getType()), IterationNum); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5399 | ConstantInt *Val = EvaluateConstantChrecAtConstant(IdxExpr, ItCst, *this); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5400 | |
| 5401 | // Form the GEP offset. |
| 5402 | Indexes[VarIdxNum] = Val; |
| 5403 | |
Chris Lattner | e166a85 | 2012-01-24 05:49:24 +0000 | [diff] [blame] | 5404 | Constant *Result = ConstantFoldLoadThroughGEPIndices(GV->getInitializer(), |
| 5405 | Indexes); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5406 | if (!Result) break; // Cannot compute! |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5407 | |
| 5408 | // Evaluate the condition for this iteration. |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 5409 | Result = ConstantExpr::getICmp(predicate, Result, RHS); |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 5410 | if (!isa<ConstantInt>(Result)) break; // Couldn't decide for sure |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 5411 | if (cast<ConstantInt>(Result)->getValue().isMinValue()) { |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5412 | #if 0 |
David Greene | df1c497 | 2009-12-23 22:18:14 +0000 | [diff] [blame] | 5413 | dbgs() << "\n***\n*** Computed loop count " << *ItCst |
Dan Gohman | e20f824 | 2009-04-21 00:47:46 +0000 | [diff] [blame] | 5414 | << "\n*** From global " << *GV << "*** BB: " << *L->getHeader() |
| 5415 | << "***\n"; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5416 | #endif |
| 5417 | ++NumArrayLenItCounts; |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5418 | return getConstant(ItCst); // Found terminating iteration! |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5419 | } |
| 5420 | } |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5421 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 5422 | } |
| 5423 | |
| 5424 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5425 | /// CanConstantFold - Return true if we can constant fold an instruction of the |
| 5426 | /// specified type, assuming that all operands were constants. |
| 5427 | static bool CanConstantFold(const Instruction *I) { |
Reid Spencer | 2341c22 | 2007-02-02 02:16:23 +0000 | [diff] [blame] | 5428 | if (isa<BinaryOperator>(I) || isa<CmpInst>(I) || |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5429 | isa<SelectInst>(I) || isa<CastInst>(I) || isa<GetElementPtrInst>(I) || |
| 5430 | isa<LoadInst>(I)) |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5431 | return true; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5432 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5433 | if (const CallInst *CI = dyn_cast<CallInst>(I)) |
| 5434 | if (const Function *F = CI->getCalledFunction()) |
Dan Gohman | a65951f | 2008-01-31 01:05:10 +0000 | [diff] [blame] | 5435 | return canConstantFoldCallTo(F); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5436 | return false; |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5437 | } |
| 5438 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5439 | /// Determine whether this instruction can constant evolve within this loop |
| 5440 | /// assuming its operands can all constant evolve. |
| 5441 | static bool canConstantEvolve(Instruction *I, const Loop *L) { |
| 5442 | // An instruction outside of the loop can't be derived from a loop PHI. |
| 5443 | if (!L->contains(I)) return false; |
| 5444 | |
| 5445 | if (isa<PHINode>(I)) { |
David Blaikie | 19ef0d3 | 2015-03-24 16:33:19 +0000 | [diff] [blame] | 5446 | // We don't currently keep track of the control flow needed to evaluate |
| 5447 | // PHIs, so we cannot handle PHIs inside of loops. |
| 5448 | return L->getHeader() == I->getParent(); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5449 | } |
| 5450 | |
| 5451 | // If we won't be able to constant fold this expression even if the operands |
| 5452 | // are constants, bail early. |
| 5453 | return CanConstantFold(I); |
| 5454 | } |
| 5455 | |
| 5456 | /// getConstantEvolvingPHIOperands - Implement getConstantEvolvingPHI by |
| 5457 | /// recursing through each instruction operand until reaching a loop header phi. |
| 5458 | static PHINode * |
| 5459 | getConstantEvolvingPHIOperands(Instruction *UseInst, const Loop *L, |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5460 | DenseMap<Instruction *, PHINode *> &PHIMap) { |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5461 | |
| 5462 | // We can evaluate this instruction if all of its operands are
| 5463 | // constant or derived from a PHI node themselves. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5464 | PHINode *PHI = nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5465 | for (Instruction::op_iterator OpI = UseInst->op_begin(), |
| 5466 | OpE = UseInst->op_end(); OpI != OpE; ++OpI) { |
| 5467 | |
| 5468 | if (isa<Constant>(*OpI)) continue; |
| 5469 | |
| 5470 | Instruction *OpInst = dyn_cast<Instruction>(*OpI); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5471 | if (!OpInst || !canConstantEvolve(OpInst, L)) return nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5472 | |
| 5473 | PHINode *P = dyn_cast<PHINode>(OpInst); |
Andrew Trick | 3e8a576 | 2011-10-05 22:06:53 +0000 | [diff] [blame] | 5474 | if (!P) |
| 5475 | // If this operand is already visited, reuse the prior result. |
| 5476 | // We may have P != PHI if this is the deepest point at which the |
| 5477 | // inconsistent paths meet. |
| 5478 | P = PHIMap.lookup(OpInst); |
| 5479 | if (!P) { |
| 5480 | // Recurse and memoize the results, whether a phi is found or not. |
| 5481 | // This recursive call invalidates pointers into PHIMap. |
| 5482 | P = getConstantEvolvingPHIOperands(OpInst, L, PHIMap); |
| 5483 | PHIMap[OpInst] = P; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5484 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5485 | if (!P) |
| 5486 | return nullptr; // Not evolving from PHI |
| 5487 | if (PHI && PHI != P) |
| 5488 | return nullptr; // Evolving from multiple different PHIs. |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5489 | PHI = P; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5490 | } |
| 5491 | // This is an expression evolving from a constant PHI!
| 5492 | return PHI; |
| 5493 | } |
| 5494 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5495 | /// getConstantEvolvingPHI - Given an LLVM value and a loop, return a PHI node |
| 5496 | /// in the loop that V is derived from. We allow arbitrary operations along the |
| 5497 | /// way, but the operands of an operation must either be constants or a value |
| 5498 | /// derived from a constant PHI. If this expression does not fit with these |
| 5499 | /// constraints, return null. |
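|      | ///
|      | /// For example, given the header phi 'i = phi [0, preheader], [i.next, latch]'
|      | /// and 'i.next = i + 1', the value 'icmp eq i.next, 100' evolves from the
|      | /// single phi 'i', which is returned.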
| 5500 | static PHINode *getConstantEvolvingPHI(Value *V, const Loop *L) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5501 | Instruction *I = dyn_cast<Instruction>(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5502 | if (!I || !canConstantEvolve(I, L)) return nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5503 | |
Anton Korobeynikov | 579f071 | 2008-02-20 11:08:44 +0000 | [diff] [blame] | 5504 | if (PHINode *PN = dyn_cast<PHINode>(I)) { |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5505 | return PN; |
Anton Korobeynikov | 579f071 | 2008-02-20 11:08:44 +0000 | [diff] [blame] | 5506 | } |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5507 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5508 | // Record non-constant instructions contained by the loop. |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5509 | DenseMap<Instruction *, PHINode *> PHIMap; |
| 5510 | return getConstantEvolvingPHIOperands(I, L, PHIMap); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5511 | } |
| 5512 | |
| 5513 | /// EvaluateExpression - Given an expression that passes the |
| 5514 | /// getConstantEvolvingPHI predicate, evaluate its value assuming that the
| 5515 | /// loop-header PHI nodes have the constant values given in Vals. If we can't
| 5516 | /// fold this expression for some reason, return null.
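|      | ///
|      | /// For example, with Vals mapping the phi 'i' to the constant 3, the
|      | /// expression 'i + 1' folds to 4; intermediate results are memoized back
|      | /// into Vals as the recursion unwinds.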
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5517 | static Constant *EvaluateExpression(Value *V, const Loop *L, |
| 5518 | DenseMap<Instruction *, Constant *> &Vals, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5519 | const DataLayout &DL, |
Chad Rosier | e6de63d | 2011-12-01 21:29:16 +0000 | [diff] [blame] | 5520 | const TargetLibraryInfo *TLI) { |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5521 | // Convenient constant check, but redundant for recursive calls. |
Reid Spencer | 30d69a5 | 2004-07-18 00:18:30 +0000 | [diff] [blame] | 5522 | if (Constant *C = dyn_cast<Constant>(V)) return C; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5523 | Instruction *I = dyn_cast<Instruction>(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5524 | if (!I) return nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5525 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5526 | if (Constant *C = Vals.lookup(I)) return C; |
| 5527 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5528 | // An instruction inside the loop depends on a value outside the loop that we |
| 5529 | // weren't given a mapping for, or a value such as a call inside the loop. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5530 | if (!canConstantEvolve(I, L)) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5531 | |
| 5532 | // An unmapped PHI can be due to a branch or another loop inside this loop, |
| 5533 | // or due to this not being the initial iteration through a loop where we |
| 5534 | // couldn't compute the evolution of this particular PHI last time. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5535 | if (isa<PHINode>(I)) return nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5536 | |
Dan Gohman | f820bd3 | 2010-06-22 13:15:46 +0000 | [diff] [blame] | 5537 | std::vector<Constant*> Operands(I->getNumOperands()); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5538 | |
| 5539 | for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) { |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5540 | Instruction *Operand = dyn_cast<Instruction>(I->getOperand(i)); |
| 5541 | if (!Operand) { |
Nick Lewycky | a447e0f3 | 2011-10-14 09:38:46 +0000 | [diff] [blame] | 5542 | Operands[i] = dyn_cast<Constant>(I->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5543 | if (!Operands[i]) return nullptr; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5544 | continue; |
| 5545 | } |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 5546 | Constant *C = EvaluateExpression(Operand, L, Vals, DL, TLI); |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5547 | Vals[Operand] = C; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5548 | if (!C) return nullptr; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 5549 | Operands[i] = C; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5550 | } |
| 5551 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5552 | if (CmpInst *CI = dyn_cast<CmpInst>(I)) |
Chris Lattner | cdfb80d | 2009-11-09 23:06:58 +0000 | [diff] [blame] | 5553 | return ConstantFoldCompareInstOperands(CI->getPredicate(), Operands[0], |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 5554 | Operands[1], DL, TLI); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5555 | if (LoadInst *LI = dyn_cast<LoadInst>(I)) { |
| 5556 | if (!LI->isVolatile()) |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 5557 | return ConstantFoldLoadFromConstPtr(Operands[0], DL); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5558 | } |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 5559 | return ConstantFoldInstOperands(I->getOpcode(), I->getType(), Operands, DL, |
Chad Rosier | e6de63d | 2011-12-01 21:29:16 +0000 | [diff] [blame] | 5560 | TLI); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5561 | } |
| 5562 | |
| 5563 | /// getConstantEvolutionLoopExitValue - If we know that the specified PHI is
| 5564 | /// in the header of its containing loop, that the loop executes a
| 5565 | /// constant number of times, and that the PHI node is just a recurrence
| 5566 | /// involving constants, fold it.
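|      | ///
|      | /// For example, for 'for (i = 0; i != 21; i += 3)' the backedge-taken count
|      | /// is 7; the recurrence is stepped symbolically seven times and the constant
|      | /// exit value 21 is cached and returned for the phi.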
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5567 | Constant * |
| 5568 | ScalarEvolution::getConstantEvolutionLoopExitValue(PHINode *PN, |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 5569 | const APInt &BEs, |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5570 | const Loop *L) { |
Dan Gohman | 0daf687 | 2011-05-09 18:44:09 +0000 | [diff] [blame] | 5571 | DenseMap<PHINode*, Constant*>::const_iterator I = |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5572 | ConstantEvolutionLoopExitValue.find(PN); |
| 5573 | if (I != ConstantEvolutionLoopExitValue.end()) |
| 5574 | return I->second; |
| 5575 | |
Dan Gohman | 4ce1fb1 | 2010-04-08 23:03:40 +0000 | [diff] [blame] | 5576 | if (BEs.ugt(MaxBruteForceIterations)) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5577 | return ConstantEvolutionLoopExitValue[PN] = nullptr; // Not going to evaluate it. |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5578 | |
| 5579 | Constant *&RetVal = ConstantEvolutionLoopExitValue[PN]; |
| 5580 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5581 | DenseMap<Instruction *, Constant *> CurrentIterVals; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5582 | BasicBlock *Header = L->getHeader(); |
| 5583 | assert(PN->getParent() == Header && "Can't evaluate PHI not in loop header!"); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5584 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5585 | // Since the loop is canonicalized, the PHI node must have two entries. One |
| 5586 | // entry must be a constant (coming in from outside of the loop), and the |
| 5587 | // second must be derived from the same PHI. |
| 5588 | bool SecondIsBackedge = L->contains(PN->getIncomingBlock(1)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5589 | PHINode *PHI = nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5590 | for (BasicBlock::iterator I = Header->begin(); |
| 5591 | (PHI = dyn_cast<PHINode>(I)); ++I) { |
| 5592 | Constant *StartCST = |
| 5593 | dyn_cast<Constant>(PHI->getIncomingValue(!SecondIsBackedge)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5594 | if (!StartCST) continue; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5595 | CurrentIterVals[PHI] = StartCST; |
| 5596 | } |
| 5597 | if (!CurrentIterVals.count(PN)) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5598 | return RetVal = nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5599 | |
| 5600 | Value *BEValue = PN->getIncomingValue(SecondIsBackedge); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5601 | |
| 5602 | // Execute the loop symbolically to determine the exit value. |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5603 | if (BEs.getActiveBits() >= 32) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5604 | return RetVal = nullptr; // More than 2^32-1 iterations?? Not doing it! |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5605 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5606 | unsigned NumIterations = BEs.getZExtValue(); // must be in range |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 5607 | unsigned IterationNum = 0; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5608 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5609 | for (; ; ++IterationNum) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5610 | if (IterationNum == NumIterations) |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5611 | return RetVal = CurrentIterVals[PN]; // Got exit value! |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5612 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5613 | // Compute the value of the PHIs for the next iteration. |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5614 | // EvaluateExpression adds non-phi values to the CurrentIterVals map. |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5615 | DenseMap<Instruction *, Constant *> NextIterVals; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5616 | Constant *NextPHI = |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5617 | EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5618 | if (!NextPHI) |
| 5619 | return nullptr; // Couldn't evaluate! |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5620 | NextIterVals[PN] = NextPHI; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5621 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5622 | bool StoppedEvolving = NextPHI == CurrentIterVals[PN]; |
| 5623 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5624 | // Also evaluate the other PHI nodes. However, we don't get to stop if we |
| 5625 | // cease to be able to evaluate one of them or if they stop evolving, |
| 5626 | // because that doesn't necessarily prevent us from computing PN. |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 5627 | SmallVector<std::pair<PHINode *, Constant *>, 8> PHIsToCompute; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5628 | for (DenseMap<Instruction *, Constant *>::const_iterator |
| 5629 | I = CurrentIterVals.begin(), E = CurrentIterVals.end(); I != E; ++I){ |
| 5630 | PHINode *PHI = dyn_cast<PHINode>(I->first); |
Nick Lewycky | 8e904de | 2011-10-24 05:51:01 +0000 | [diff] [blame] | 5631 | if (!PHI || PHI == PN || PHI->getParent() != Header) continue; |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 5632 | PHIsToCompute.push_back(std::make_pair(PHI, I->second)); |
| 5633 | } |
| 5634 | // We use two distinct loops because EvaluateExpression may invalidate any |
| 5635 | // iterators into CurrentIterVals. |
| 5636 | for (SmallVectorImpl<std::pair<PHINode *, Constant*> >::const_iterator |
| 5637 | I = PHIsToCompute.begin(), E = PHIsToCompute.end(); I != E; ++I) { |
| 5638 | PHINode *PHI = I->first; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5639 | Constant *&NextPHI = NextIterVals[PHI]; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5640 | if (!NextPHI) { // Not already computed. |
| 5641 | Value *BEValue = PHI->getIncomingValue(SecondIsBackedge); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5642 | NextPHI = EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5643 | } |
| 5644 | if (NextPHI != I->second) |
| 5645 | StoppedEvolving = false; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5646 | } |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5647 | |
| 5648 | // If all entries in CurrentIterVals == NextIterVals then we can stop
| 5649 | // iterating; the loop can't continue to change.
| 5650 | if (StoppedEvolving) |
| 5651 | return RetVal = CurrentIterVals[PN]; |
| 5652 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 5653 | CurrentIterVals.swap(NextIterVals); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5654 | } |
| 5655 | } |
| 5656 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 5657 | /// ComputeExitCountExhaustively - If the loop is known to execute a |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5658 | /// constant number of times (the condition evolves only from constants), |
| 5659 | /// try to evaluate a few iterations of the loop until the exit
| 5660 | /// condition gets a value of ExitWhen (true or false). If we cannot
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5661 | /// evaluate the trip count of the loop, return getCouldNotCompute(). |
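///
/// A minimal example (illustrative, not part of the original source): for a
/// loop guarded by "i * i != 16" where i starts at 0 and is incremented by 1,
/// the exit condition evolves only from constants; evaluating successive
/// iterations shows the condition first takes its exit value when i reaches
/// 4, so a constant exit count is returned instead of getCouldNotCompute().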
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5662 | const SCEV *ScalarEvolution::ComputeExitCountExhaustively(const Loop *L, |
| 5663 | Value *Cond, |
| 5664 | bool ExitWhen) { |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5665 | PHINode *PN = getConstantEvolvingPHI(Cond, L); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5666 | if (!PN) return getCouldNotCompute(); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5667 | |
Dan Gohman | 866971e | 2010-06-19 14:17:24 +0000 | [diff] [blame] | 5668 | // If the loop is canonicalized, the PHI will have exactly two entries. |
| 5669 | // That's the only form we support here. |
| 5670 | if (PN->getNumIncomingValues() != 2) return getCouldNotCompute(); |
| 5671 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5672 | DenseMap<Instruction *, Constant *> CurrentIterVals; |
| 5673 | BasicBlock *Header = L->getHeader(); |
| 5674 | assert(PN->getParent() == Header && "Can't evaluate PHI not in loop header!"); |
| 5675 | |
Dan Gohman | 866971e | 2010-06-19 14:17:24 +0000 | [diff] [blame] | 5676 | // One entry must be a constant (coming in from outside of the loop), and the |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5677 | // second must be derived from the same PHI. |
| 5678 | bool SecondIsBackedge = L->contains(PN->getIncomingBlock(1)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5679 | PHINode *PHI = nullptr; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5680 | for (BasicBlock::iterator I = Header->begin(); |
| 5681 | (PHI = dyn_cast<PHINode>(I)); ++I) { |
| 5682 | Constant *StartCST = |
| 5683 | dyn_cast<Constant>(PHI->getIncomingValue(!SecondIsBackedge)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5684 | if (!StartCST) continue; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5685 | CurrentIterVals[PHI] = StartCST; |
| 5686 | } |
| 5687 | if (!CurrentIterVals.count(PN)) |
| 5688 | return getCouldNotCompute(); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5689 | |
| 5690 | // Okay, we found a PHI node that defines the trip count of this loop. Execute
| 5691 | // the loop symbolically to determine when the condition gets a value of |
| 5692 | // "ExitWhen". |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 5693 | unsigned MaxIterations = MaxBruteForceIterations; // Limit analysis. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5694 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5695 | for (unsigned IterationNum = 0; IterationNum != MaxIterations;++IterationNum){ |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5696 | ConstantInt *CondVal = dyn_cast_or_null<ConstantInt>( |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5697 | EvaluateExpression(Cond, L, CurrentIterVals, DL, &TLI)); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5698 | |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 5699 | // Couldn't symbolically evaluate. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5700 | if (!CondVal) return getCouldNotCompute(); |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 5701 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 5702 | if (CondVal->getValue() == uint64_t(ExitWhen)) { |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5703 | ++NumBruteForceTripCountsComputed; |
Owen Anderson | 55f1c09 | 2009-08-13 21:58:54 +0000 | [diff] [blame] | 5704 | return getConstant(Type::getInt32Ty(getContext()), IterationNum); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5705 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5706 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5707 | // Update all the PHI nodes for the next iteration. |
| 5708 | DenseMap<Instruction *, Constant *> NextIterVals; |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 5709 | |
| 5710 | // Create a list of which PHIs we need to compute. We want to do this before |
| 5711 | // calling EvaluateExpression on them because that may invalidate iterators |
| 5712 | // into CurrentIterVals. |
| 5713 | SmallVector<PHINode *, 8> PHIsToCompute; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5714 | for (DenseMap<Instruction *, Constant *>::const_iterator |
| 5715 | I = CurrentIterVals.begin(), E = CurrentIterVals.end(); I != E; ++I){ |
| 5716 | PHINode *PHI = dyn_cast<PHINode>(I->first); |
| 5717 | if (!PHI || PHI->getParent() != Header) continue; |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 5718 | PHIsToCompute.push_back(PHI); |
| 5719 | } |
| 5720 | for (SmallVectorImpl<PHINode *>::const_iterator I = PHIsToCompute.begin(), |
| 5721 | E = PHIsToCompute.end(); I != E; ++I) { |
| 5722 | PHINode *PHI = *I; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5723 | Constant *&NextPHI = NextIterVals[PHI]; |
| 5724 | if (NextPHI) continue; // Already computed! |
| 5725 | |
| 5726 | Value *BEValue = PHI->getIncomingValue(SecondIsBackedge); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5727 | NextPHI = EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 5728 | } |
| 5729 | CurrentIterVals.swap(NextIterVals); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 5730 | } |
| 5731 | |
| 5732 | // Too many iterations were needed to evaluate. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 5733 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5734 | } |
| 5735 | |
Dan Gohman | 237d9e5 | 2009-09-03 15:00:26 +0000 | [diff] [blame] | 5736 | /// getSCEVAtScope - Return a SCEV expression for the specified value |
Dan Gohman | b81f47d | 2009-05-08 20:38:54 +0000 | [diff] [blame] | 5737 | /// at the specified scope in the program. The L value specifies the scope:
| 5738 | /// null means the top level, and otherwise the scope is the point
| 5739 | /// immediately inside the specified loop.
| 5740 | /// |
| 5741 | /// This method can be used to compute the exit value for a variable defined |
| 5742 | /// in a loop by querying what the value will hold in the parent loop. |
| 5743 | /// |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 5744 | /// In the case that a relevant loop exit value cannot be computed, the |
| 5745 | /// original value V is returned. |
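///
/// Illustrative example (a sketch, not from the original source): for an add
/// recurrence {0,+,2}<L> in a loop L whose backedge-taken count is the
/// constant 9, querying the expression at the scope of L's parent loop yields
/// 0 + 2*9 = 18, the value the expression holds when L exits.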
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5746 | const SCEV *ScalarEvolution::getSCEVAtScope(const SCEV *V, const Loop *L) { |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 5747 | // Check to see if we've folded this expression at this loop before. |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 5748 | SmallVector<std::pair<const Loop *, const SCEV *>, 2> &Values = ValuesAtScopes[V]; |
| 5749 | for (unsigned u = 0; u < Values.size(); u++) { |
| 5750 | if (Values[u].first == L) |
| 5751 | return Values[u].second ? Values[u].second : V; |
| 5752 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5753 | Values.push_back(std::make_pair(L, static_cast<const SCEV *>(nullptr))); |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 5754 | // Otherwise compute it. |
| 5755 | const SCEV *C = computeSCEVAtScope(V, L); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 5756 | SmallVector<std::pair<const Loop *, const SCEV *>, 2> &Values2 = ValuesAtScopes[V]; |
| 5757 | for (unsigned u = Values2.size(); u > 0; u--) { |
| 5758 | if (Values2[u - 1].first == L) { |
| 5759 | Values2[u - 1].second = C; |
| 5760 | break; |
| 5761 | } |
| 5762 | } |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 5763 | return C; |
| 5764 | } |
| 5765 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5766 | /// This builds up a Constant using the ConstantExpr interface. That way, we |
| 5767 | /// will return Constants for objects which aren't represented by a |
| 5768 | /// SCEVConstant, because SCEVConstant is restricted to ConstantInt. |
| 5769 | /// Returns NULL if the SCEV isn't representable as a Constant. |
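///
/// For illustration (hypothetical input, not from the original source): the
/// SCEV (4 + @g), where @g is a global pointer, has no SCEVConstant form,
/// but the scAddExpr case below turns it into the constant expression
/// "getelementptr(i8* bitcast of @g, 4)", adding the byte offset to the
/// pointer.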
| 5770 | static Constant *BuildConstantFromSCEV(const SCEV *V) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 5771 | switch (static_cast<SCEVTypes>(V->getSCEVType())) { |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5772 | case scCouldNotCompute: |
| 5773 | case scAddRecExpr: |
| 5774 | break; |
| 5775 | case scConstant: |
| 5776 | return cast<SCEVConstant>(V)->getValue(); |
| 5777 | case scUnknown: |
| 5778 | return dyn_cast<Constant>(cast<SCEVUnknown>(V)->getValue()); |
| 5779 | case scSignExtend: { |
| 5780 | const SCEVSignExtendExpr *SS = cast<SCEVSignExtendExpr>(V); |
| 5781 | if (Constant *CastOp = BuildConstantFromSCEV(SS->getOperand())) |
| 5782 | return ConstantExpr::getSExt(CastOp, SS->getType()); |
| 5783 | break; |
| 5784 | } |
| 5785 | case scZeroExtend: { |
| 5786 | const SCEVZeroExtendExpr *SZ = cast<SCEVZeroExtendExpr>(V); |
| 5787 | if (Constant *CastOp = BuildConstantFromSCEV(SZ->getOperand())) |
| 5788 | return ConstantExpr::getZExt(CastOp, SZ->getType()); |
| 5789 | break; |
| 5790 | } |
| 5791 | case scTruncate: { |
| 5792 | const SCEVTruncateExpr *ST = cast<SCEVTruncateExpr>(V); |
| 5793 | if (Constant *CastOp = BuildConstantFromSCEV(ST->getOperand())) |
| 5794 | return ConstantExpr::getTrunc(CastOp, ST->getType()); |
| 5795 | break; |
| 5796 | } |
| 5797 | case scAddExpr: { |
| 5798 | const SCEVAddExpr *SA = cast<SCEVAddExpr>(V); |
| 5799 | if (Constant *C = BuildConstantFromSCEV(SA->getOperand(0))) { |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 5800 | if (PointerType *PTy = dyn_cast<PointerType>(C->getType())) { |
| 5801 | unsigned AS = PTy->getAddressSpace(); |
| 5802 | Type *DestPtrTy = Type::getInt8PtrTy(C->getContext(), AS); |
| 5803 | C = ConstantExpr::getBitCast(C, DestPtrTy); |
| 5804 | } |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5805 | for (unsigned i = 1, e = SA->getNumOperands(); i != e; ++i) { |
| 5806 | Constant *C2 = BuildConstantFromSCEV(SA->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5807 | if (!C2) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5808 | |
| 5809 | // First pointer! |
| 5810 | if (!C->getType()->isPointerTy() && C2->getType()->isPointerTy()) { |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 5811 | unsigned AS = C2->getType()->getPointerAddressSpace(); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5812 | std::swap(C, C2); |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 5813 | Type *DestPtrTy = Type::getInt8PtrTy(C->getContext(), AS); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5814 | // The offsets have been converted to bytes. We can add bytes to an |
| 5815 | // i8* by GEP with the byte count in the first index. |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 5816 | C = ConstantExpr::getBitCast(C, DestPtrTy); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5817 | } |
| 5818 | |
| 5819 | // Don't bother trying to sum two pointers. We probably can't |
| 5820 | // statically compute a load that results from it anyway. |
| 5821 | if (C2->getType()->isPointerTy()) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5822 | return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5823 | |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 5824 | if (PointerType *PTy = dyn_cast<PointerType>(C->getType())) { |
| 5825 | if (PTy->getElementType()->isStructTy()) |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5826 | C2 = ConstantExpr::getIntegerCast( |
| 5827 | C2, Type::getInt32Ty(C->getContext()), true); |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 5828 | C = ConstantExpr::getGetElementPtr(PTy->getElementType(), C, C2); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5829 | } else |
| 5830 | C = ConstantExpr::getAdd(C, C2); |
| 5831 | } |
| 5832 | return C; |
| 5833 | } |
| 5834 | break; |
| 5835 | } |
| 5836 | case scMulExpr: { |
| 5837 | const SCEVMulExpr *SM = cast<SCEVMulExpr>(V); |
| 5838 | if (Constant *C = BuildConstantFromSCEV(SM->getOperand(0))) { |
| 5839 | // Don't bother with pointers at all. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5840 | if (C->getType()->isPointerTy()) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5841 | for (unsigned i = 1, e = SM->getNumOperands(); i != e; ++i) { |
| 5842 | Constant *C2 = BuildConstantFromSCEV(SM->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5843 | if (!C2 || C2->getType()->isPointerTy()) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5844 | C = ConstantExpr::getMul(C, C2); |
| 5845 | } |
| 5846 | return C; |
| 5847 | } |
| 5848 | break; |
| 5849 | } |
| 5850 | case scUDivExpr: { |
| 5851 | const SCEVUDivExpr *SU = cast<SCEVUDivExpr>(V); |
| 5852 | if (Constant *LHS = BuildConstantFromSCEV(SU->getLHS())) |
| 5853 | if (Constant *RHS = BuildConstantFromSCEV(SU->getRHS())) |
| 5854 | if (LHS->getType() == RHS->getType()) |
| 5855 | return ConstantExpr::getUDiv(LHS, RHS); |
| 5856 | break; |
| 5857 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 5858 | case scSMaxExpr: |
| 5859 | case scUMaxExpr: |
| 5860 | break; // TODO: smax, umax. |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5861 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5862 | return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5863 | } |
| 5864 | |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 5865 | const SCEV *ScalarEvolution::computeSCEVAtScope(const SCEV *V, const Loop *L) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5866 | if (isa<SCEVConstant>(V)) return V; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5867 | |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 5868 | // If this instruction is evolved from a constant-evolving PHI, compute the |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5869 | // exit value from the loop without using SCEVs. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5870 | if (const SCEVUnknown *SU = dyn_cast<SCEVUnknown>(V)) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5871 | if (Instruction *I = dyn_cast<Instruction>(SU->getValue())) { |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5872 | const Loop *LI = this->LI[I->getParent()]; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5873 | if (LI && LI->getParentLoop() == L) // Looking for loop exit value. |
| 5874 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 5875 | if (PN->getParent() == LI->getHeader()) { |
| 5876 | // Okay, there is no closed form solution for the PHI node. Check |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5877 | // to see if the loop that contains it has a known backedge-taken |
| 5878 | // count. If so, we may be able to force computation of the exit |
| 5879 | // value. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5880 | const SCEV *BackedgeTakenCount = getBackedgeTakenCount(LI); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5881 | if (const SCEVConstant *BTCC = |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5882 | dyn_cast<SCEVConstant>(BackedgeTakenCount)) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5883 | // Okay, we know how many times the containing loop executes. If |
| 5884 | // this is a constant evolving PHI node, get the final value at |
| 5885 | // the specified iteration number. |
| 5886 | Constant *RV = getConstantEvolutionLoopExitValue(PN, |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 5887 | BTCC->getValue()->getValue(), |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5888 | LI); |
Dan Gohman | 9d203c6 | 2009-06-29 21:31:18 +0000 | [diff] [blame] | 5889 | if (RV) return getSCEV(RV); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5890 | } |
| 5891 | } |
| 5892 | |
Reid Spencer | e6328ca | 2006-12-04 21:33:23 +0000 | [diff] [blame] | 5893 | // Okay, this is an expression that we cannot symbolically evaluate |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5894 | // into a SCEV. Check to see if it's possible to symbolically evaluate |
Reid Spencer | e6328ca | 2006-12-04 21:33:23 +0000 | [diff] [blame] | 5895 | // the arguments into constants, and if so, try to constant propagate the |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5896 | // result. This is particularly useful for computing loop exit values. |
| 5897 | if (CanConstantFold(I)) { |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5898 | SmallVector<Constant *, 4> Operands; |
| 5899 | bool MadeImprovement = false; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5900 | for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) { |
| 5901 | Value *Op = I->getOperand(i); |
| 5902 | if (Constant *C = dyn_cast<Constant>(Op)) { |
| 5903 | Operands.push_back(C); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5904 | continue; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5905 | } |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5906 | |
| 5907 | // If any of the operands is non-constant and of a type that is
| 5908 | // neither integer nor pointer, don't even try to analyze it
| 5909 | // with SCEV techniques.
| 5910 | if (!isSCEVable(Op->getType())) |
| 5911 | return V; |
| 5912 | |
| 5913 | const SCEV *OrigV = getSCEV(Op); |
| 5914 | const SCEV *OpV = getSCEVAtScope(OrigV, L); |
| 5915 | MadeImprovement |= OrigV != OpV; |
| 5916 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5917 | Constant *C = BuildConstantFromSCEV(OpV); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5918 | if (!C) return V; |
| 5919 | if (C->getType() != Op->getType()) |
| 5920 | C = ConstantExpr::getCast(CastInst::getCastOpcode(C, false, |
| 5921 | Op->getType(), |
| 5922 | false), |
| 5923 | C, Op->getType()); |
| 5924 | Operands.push_back(C); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5925 | } |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5926 | |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5927 | // Check to see if getSCEVAtScope actually made an improvement. |
| 5928 | if (MadeImprovement) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5929 | Constant *C = nullptr; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5930 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5931 | if (const CmpInst *CI = dyn_cast<CmpInst>(I)) |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5932 | C = ConstantFoldCompareInstOperands(CI->getPredicate(), Operands[0], |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5933 | Operands[1], DL, &TLI); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5934 | else if (const LoadInst *LI = dyn_cast<LoadInst>(I)) { |
| 5935 | if (!LI->isVolatile()) |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 5936 | C = ConstantFoldLoadFromConstPtr(Operands[0], DL); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 5937 | } else |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5938 | C = ConstantFoldInstOperands(I->getOpcode(), I->getType(), Operands, |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5939 | DL, &TLI); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5940 | if (!C) return V; |
Dan Gohman | 4aad750 | 2010-02-24 19:31:47 +0000 | [diff] [blame] | 5941 | return getSCEV(C); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5942 | } |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 5943 | } |
| 5944 | } |
| 5945 | |
| 5946 | // This is some other type of SCEVUnknown; just return it.
| 5947 | return V; |
| 5948 | } |
| 5949 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5950 | if (const SCEVCommutativeExpr *Comm = dyn_cast<SCEVCommutativeExpr>(V)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5951 | // Avoid performing the look-up in the common case where the specified |
| 5952 | // expression has no loop-variant portions. |
| 5953 | for (unsigned i = 0, e = Comm->getNumOperands(); i != e; ++i) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5954 | const SCEV *OpAtScope = getSCEVAtScope(Comm->getOperand(i), L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5955 | if (OpAtScope != Comm->getOperand(i)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5956 | // Okay, at least one of these operands is loop variant but might be |
| 5957 | // foldable. Build a new instance of the folded commutative expression. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 5958 | SmallVector<const SCEV *, 8> NewOps(Comm->op_begin(), |
| 5959 | Comm->op_begin()+i); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5960 | NewOps.push_back(OpAtScope); |
| 5961 | |
| 5962 | for (++i; i != e; ++i) { |
| 5963 | OpAtScope = getSCEVAtScope(Comm->getOperand(i), L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5964 | NewOps.push_back(OpAtScope); |
| 5965 | } |
| 5966 | if (isa<SCEVAddExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5967 | return getAddExpr(NewOps); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 5968 | if (isa<SCEVMulExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5969 | return getMulExpr(NewOps); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 5970 | if (isa<SCEVSMaxExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5971 | return getSMaxExpr(NewOps); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 5972 | if (isa<SCEVUMaxExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5973 | return getUMaxExpr(NewOps); |
Torok Edwin | fbcc663 | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 5974 | llvm_unreachable("Unknown commutative SCEV type!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5975 | } |
| 5976 | } |
| 5977 | // If we got here, all operands are loop invariant. |
| 5978 | return Comm; |
| 5979 | } |
| 5980 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5981 | if (const SCEVUDivExpr *Div = dyn_cast<SCEVUDivExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5982 | const SCEV *LHS = getSCEVAtScope(Div->getLHS(), L); |
| 5983 | const SCEV *RHS = getSCEVAtScope(Div->getRHS(), L); |
Nick Lewycky | 5234830 | 2009-01-13 09:18:58 +0000 | [diff] [blame] | 5984 | if (LHS == Div->getLHS() && RHS == Div->getRHS()) |
| 5985 | return Div; // must be loop invariant |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5986 | return getUDivExpr(LHS, RHS); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5987 | } |
| 5988 | |
| 5989 | // If this is a loop recurrence for a loop that does not contain L, then we |
| 5990 | // are dealing with the final value computed by the loop. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5991 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(V)) { |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 5992 | // First, attempt to evaluate each operand. |
| 5993 | // Avoid performing the look-up in the common case where the specified |
| 5994 | // expression has no loop-variant portions. |
| 5995 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) { |
| 5996 | const SCEV *OpAtScope = getSCEVAtScope(AddRec->getOperand(i), L); |
| 5997 | if (OpAtScope == AddRec->getOperand(i)) |
| 5998 | continue; |
| 5999 | |
| 6000 | // Okay, at least one of these operands is loop variant but might be |
| 6001 | // foldable. Build a new instance of the folded commutative expression. |
| 6002 | SmallVector<const SCEV *, 8> NewOps(AddRec->op_begin(), |
| 6003 | AddRec->op_begin()+i); |
| 6004 | NewOps.push_back(OpAtScope); |
| 6005 | for (++i; i != e; ++i) |
| 6006 | NewOps.push_back(getSCEVAtScope(AddRec->getOperand(i), L)); |
| 6007 | |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 6008 | const SCEV *FoldedRec = |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6009 | getAddRecExpr(NewOps, AddRec->getLoop(), |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 6010 | AddRec->getNoWrapFlags(SCEV::FlagNW)); |
| 6011 | AddRec = dyn_cast<SCEVAddRecExpr>(FoldedRec); |
Andrew Trick | 01eff82 | 2011-04-27 05:42:17 +0000 | [diff] [blame] | 6012 | // The addrec may be folded to a nonrecurrence, for example, if the |
| 6013 | // induction variable is multiplied by zero after constant folding. Go |
| 6014 | // ahead and return the folded value. |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 6015 | if (!AddRec) |
| 6016 | return FoldedRec; |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 6017 | break; |
| 6018 | } |
| 6019 | |
| 6020 | // If the scope is outside the addrec's loop, evaluate it by using the |
| 6021 | // loop exit value of the addrec. |
| 6022 | if (!AddRec->getLoop()->contains(L)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6023 | // To evaluate this recurrence, we need to know how many times the AddRec |
| 6024 | // loop iterates. Compute this now. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6025 | const SCEV *BackedgeTakenCount = getBackedgeTakenCount(AddRec->getLoop()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6026 | if (BackedgeTakenCount == getCouldNotCompute()) return AddRec; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6027 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 6028 | // Then, evaluate the AddRec. |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6029 | return AddRec->evaluateAtIteration(BackedgeTakenCount, *this); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6030 | } |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 6031 | |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 6032 | return AddRec; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6033 | } |
| 6034 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 6035 | if (const SCEVZeroExtendExpr *Cast = dyn_cast<SCEVZeroExtendExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6036 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 6037 | if (Op == Cast->getOperand()) |
| 6038 | return Cast; // must be loop invariant |
| 6039 | return getZeroExtendExpr(Op, Cast->getType()); |
| 6040 | } |
| 6041 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 6042 | if (const SCEVSignExtendExpr *Cast = dyn_cast<SCEVSignExtendExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6043 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 6044 | if (Op == Cast->getOperand()) |
| 6045 | return Cast; // must be loop invariant |
| 6046 | return getSignExtendExpr(Op, Cast->getType()); |
| 6047 | } |
| 6048 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 6049 | if (const SCEVTruncateExpr *Cast = dyn_cast<SCEVTruncateExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6050 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 6051 | if (Op == Cast->getOperand()) |
| 6052 | return Cast; // must be loop invariant |
| 6053 | return getTruncateExpr(Op, Cast->getType()); |
| 6054 | } |
| 6055 | |
Torok Edwin | fbcc663 | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 6056 | llvm_unreachable("Unknown SCEV type!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6057 | } |
| 6058 | |
Dan Gohman | b81f47d | 2009-05-08 20:38:54 +0000 | [diff] [blame] | 6059 | /// getSCEVAtScope - This is a convenience function which does |
| 6060 | /// getSCEVAtScope(getSCEV(V), L). |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6061 | const SCEV *ScalarEvolution::getSCEVAtScope(Value *V, const Loop *L) { |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6062 | return getSCEVAtScope(getSCEV(V), L); |
| 6063 | } |
| 6064 | |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 6065 | /// SolveLinEquationWithOverflow - Finds the minimum unsigned root of the |
| 6066 | /// following equation: |
| 6067 | /// |
| 6068 | /// A * X = B (mod N) |
| 6069 | /// |
| 6070 | /// where N = 2^BW and BW is the common bit width of A and B. The signedness of |
| 6071 | /// A and B isn't important. |
| 6072 | /// |
| 6073 | /// If the equation does not have a solution, SCEVCouldNotCompute is returned. |
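///
/// Worked instance (illustrative, not from the original source): with BW = 8,
/// solving 6 * X = 4 (mod 256) gives D = gcd(6, 256) = 2; B = 4 is divisible
/// by D; the multiplicative inverse of 6/2 = 3 modulo 256/2 = 128 is 43; and
/// the minimum unsigned root is (43 * (4/2)) mod 128 = 86. Indeed
/// 6 * 86 = 516 = 2 * 256 + 4.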
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6074 | static const SCEV *SolveLinEquationWithOverflow(const APInt &A, const APInt &B, |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 6075 | ScalarEvolution &SE) { |
| 6076 | uint32_t BW = A.getBitWidth(); |
| 6077 | assert(BW == B.getBitWidth() && "Bit widths must be the same."); |
| 6078 | assert(A != 0 && "A must be non-zero."); |
| 6079 | |
| 6080 | // 1. D = gcd(A, N) |
| 6081 | // |
| 6082 | // The gcd of A and N may have only one prime factor: 2. The number of |
| 6083 | // trailing zeros in A is its multiplicity.
| 6084 | uint32_t Mult2 = A.countTrailingZeros(); |
| 6085 | // D = 2^Mult2 |
| 6086 | |
| 6087 | // 2. Check if B is divisible by D. |
| 6088 | // |
| 6089 | // B is divisible by D if and only if the multiplicity of prime factor 2 for B |
| 6090 | // is not less than multiplicity of this prime factor for D. |
| 6091 | if (B.countTrailingZeros() < Mult2) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 6092 | return SE.getCouldNotCompute(); |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 6093 | |
| 6094 | // 3. Compute I: the multiplicative inverse of (A / D) in arithmetic |
| 6095 | // modulo (N / D). |
| 6096 | // |
| 6097 | // (N / D) may need BW+1 bits in its representation. Hence, we'll use this |
| 6098 | // bit width during computations. |
| 6099 | APInt AD = A.lshr(Mult2).zext(BW + 1); // AD = A / D |
| 6100 | APInt Mod(BW + 1, 0); |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 6101 | Mod.setBit(BW - Mult2); // Mod = N / D |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 6102 | APInt I = AD.multiplicativeInverse(Mod); |
| 6103 | |
| 6104 | // 4. Compute the minimum unsigned root of the equation: |
| 6105 | // I * (B / D) mod (N / D) |
| 6106 | APInt Result = (I * B.lshr(Mult2).zext(BW + 1)).urem(Mod); |
| 6107 | |
| 6108 | // The result is guaranteed to be less than 2^BW so we may truncate it to BW |
| 6109 | // bits. |
| 6110 | return SE.getConstant(Result.trunc(BW)); |
| 6111 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6112 | |
| 6113 | /// SolveQuadraticEquation - Find the roots of the quadratic equation for the |
| 6114 | /// given quadratic chrec {L,+,M,+,N}. This returns either the two roots (which |
| 6115 | /// might be the same) or two SCEVCouldNotCompute objects. |
| 6116 | /// |
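/// Illustrative example (not from the original source): the chrec
/// {-6,+,0,+,2} has the value -6 + 0*x + 2*(x*(x-1)/2) = x^2 - x - 6 at
/// iteration x, so the polynomial coefficients computed below are
/// A = N/2 = 1, B = M - N/2 = -1, C = L = -6, and the two roots returned are
/// 3 and -2.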
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6117 | static std::pair<const SCEV *,const SCEV *> |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 6118 | SolveQuadraticEquation(const SCEVAddRecExpr *AddRec, ScalarEvolution &SE) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6119 | assert(AddRec->getNumOperands() == 3 && "This is not a quadratic chrec!"); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6120 | const SCEVConstant *LC = dyn_cast<SCEVConstant>(AddRec->getOperand(0)); |
| 6121 | const SCEVConstant *MC = dyn_cast<SCEVConstant>(AddRec->getOperand(1)); |
| 6122 | const SCEVConstant *NC = dyn_cast<SCEVConstant>(AddRec->getOperand(2)); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6123 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6124 | // We currently can only solve this if the coefficients are constants. |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6125 | if (!LC || !MC || !NC) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6126 | const SCEV *CNC = SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6127 | return std::make_pair(CNC, CNC); |
| 6128 | } |
| 6129 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6130 | uint32_t BitWidth = LC->getValue()->getValue().getBitWidth(); |
Chris Lattner | cad61e8 | 2007-04-15 19:52:49 +0000 | [diff] [blame] | 6131 | const APInt &L = LC->getValue()->getValue(); |
| 6132 | const APInt &M = MC->getValue()->getValue(); |
| 6133 | const APInt &N = NC->getValue()->getValue(); |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6134 | APInt Two(BitWidth, 2); |
| 6135 | APInt Four(BitWidth, 4); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6136 | |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 6137 | { |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6138 | using namespace APIntOps; |
Zhou Sheng | 2852d99 | 2007-04-07 17:48:27 +0000 | [diff] [blame] | 6139 | const APInt& C = L; |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6140 | // Convert from chrec coefficients to polynomial coefficients AX^2+BX+C |
| 6141 | // The B coefficient is M-N/2 |
| 6142 | APInt B(M); |
| 6143 | B -= sdiv(N,Two); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6144 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6145 | // The A coefficient is N/2 |
Zhou Sheng | 2852d99 | 2007-04-07 17:48:27 +0000 | [diff] [blame] | 6146 | APInt A(N.sdiv(Two)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6147 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6148 | // Compute the B^2-4ac term. |
| 6149 | APInt SqrtTerm(B); |
| 6150 | SqrtTerm *= B; |
| 6151 | SqrtTerm -= Four * (A * C); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6152 | |
Nick Lewycky | fb78083 | 2012-08-01 09:14:36 +0000 | [diff] [blame] | 6153 | if (SqrtTerm.isNegative()) { |
| 6154 | // The loop is provably infinite. |
| 6155 | const SCEV *CNC = SE.getCouldNotCompute(); |
| 6156 | return std::make_pair(CNC, CNC); |
| 6157 | } |
| 6158 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6159 | // Compute sqrt(B^2-4ac). This is guaranteed to be the nearest |
| 6160 | // integer value or else APInt::sqrt() will assert. |
| 6161 | APInt SqrtVal(SqrtTerm.sqrt()); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6162 | |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 6163 | // Compute the two solutions for the quadratic formula. |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 6164 | // The divisions must be performed as signed divisions. |
| 6165 | APInt NegB(-B); |
Nick Lewycky | 3155552 | 2011-10-03 07:10:45 +0000 | [diff] [blame] | 6166 | APInt TwoA(A << 1); |
Nick Lewycky | 7b14e20 | 2008-11-03 02:43:49 +0000 | [diff] [blame] | 6167 | if (TwoA.isMinValue()) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6168 | const SCEV *CNC = SE.getCouldNotCompute(); |
Nick Lewycky | 7b14e20 | 2008-11-03 02:43:49 +0000 | [diff] [blame] | 6169 | return std::make_pair(CNC, CNC); |
| 6170 | } |
| 6171 | |
Owen Anderson | 47db941 | 2009-07-22 00:24:57 +0000 | [diff] [blame] | 6172 | LLVMContext &Context = SE.getContext(); |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 6173 | |
| 6174 | ConstantInt *Solution1 = |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 6175 | ConstantInt::get(Context, (NegB + SqrtVal).sdiv(TwoA)); |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 6176 | ConstantInt *Solution2 = |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 6177 | ConstantInt::get(Context, (NegB - SqrtVal).sdiv(TwoA)); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6178 | |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 6179 | return std::make_pair(SE.getConstant(Solution1), |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 6180 | SE.getConstant(Solution2)); |
Nick Lewycky | 3155552 | 2011-10-03 07:10:45 +0000 | [diff] [blame] | 6181 | } // end local scope with APIntOps in use
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6182 | } |
| 6183 | |
| 6184 | /// HowFarToZero - Return the number of times a backedge comparing the specified |
Dan Gohman | 4c720c0 | 2009-06-06 14:37:11 +0000 | [diff] [blame] | 6185 | /// value to zero will execute. If not computable, return CouldNotCompute. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6186 | /// |
| 6187 | /// This is only used for loops with a "x != y" exit test. The exit condition is |
| 6188 | /// now expressed as a single expression, V = x-y. So the exit test is |
| 6189 | /// effectively V != 0. We know and take advantage of the fact that this |
| 6190 | /// expression is only used in a comparison-with-zero context.
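///
/// For example (illustrative, not from the original source): a loop
/// "for (i = 0; i != 8; i += 2)" gives the exit expression V = {-8,+,2};
/// solving V == 0 yields 4, the first iteration number at which the
/// expression is zero.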
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6191 | ScalarEvolution::ExitLimit |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6192 | ScalarEvolution::HowFarToZero(const SCEV *V, const Loop *L, bool ControlsExit) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6193 | // If the value is a constant |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 6194 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(V)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6195 | // If the value is already zero, the branch will execute zero times. |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 6196 | if (C->getValue()->isZero()) return C; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6197 | return getCouldNotCompute(); // Otherwise it will loop infinitely. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6198 | } |
| 6199 | |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6200 | const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(V); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6201 | if (!AddRec || AddRec->getLoop() != L) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6202 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6203 | |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6204 | // If this is a quadratic (3-term) AddRec {L,+,M,+,N}, find the roots of |
| 6205 | // the quadratic equation to solve it. |
| 6206 | if (AddRec->isQuadratic() && AddRec->getType()->isIntegerTy()) { |
| 6207 | std::pair<const SCEV *,const SCEV *> Roots = |
| 6208 | SolveQuadraticEquation(AddRec, *this); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6209 | const SCEVConstant *R1 = dyn_cast<SCEVConstant>(Roots.first); |
| 6210 | const SCEVConstant *R2 = dyn_cast<SCEVConstant>(Roots.second); |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6211 | if (R1 && R2) { |
Chris Lattner | 0916921 | 2004-04-02 20:26:46 +0000 | [diff] [blame] | 6212 | #if 0 |
David Greene | df1c497 | 2009-12-23 22:18:14 +0000 | [diff] [blame] | 6213 | dbgs() << "HFTZ: " << *V << " - sol#1: " << *R1 |
Dan Gohman | e20f824 | 2009-04-21 00:47:46 +0000 | [diff] [blame] | 6214 | << " sol#2: " << *R2 << "\n"; |
Chris Lattner | 0916921 | 2004-04-02 20:26:46 +0000 | [diff] [blame] | 6215 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6216 | // Pick the smallest positive root value. |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 6217 | if (ConstantInt *CB = |
Chris Lattner | 28f140a | 2011-01-09 22:58:47 +0000 | [diff] [blame] | 6218 | dyn_cast<ConstantInt>(ConstantExpr::getICmp(CmpInst::ICMP_ULT, |
| 6219 | R1->getValue(), |
| 6220 | R2->getValue()))) { |
David Blaikie | dc3f01e | 2015-03-09 01:57:13 +0000 | [diff] [blame] | 6221 | if (!CB->getZExtValue()) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6222 | std::swap(R1, R2); // R1 is the minimum root now. |
Andrew Trick | 2a3b716 | 2011-03-09 17:23:39 +0000 | [diff] [blame] | 6223 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6224 | // We can only use this value if the chrec ends up with an exact zero |
| 6225 | // value at this index. When solving for "X*X != 5", for example, we |
| 6226 | // should not accept a root of 2. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6227 | const SCEV *Val = AddRec->evaluateAtIteration(R1, *this); |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 6228 | if (Val->isZero()) |
| 6229 | return R1; // We found a quadratic root! |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6230 | } |
| 6231 | } |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6232 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6233 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6234 | |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6235 | // Otherwise we can only handle this if it is affine. |
| 6236 | if (!AddRec->isAffine()) |
| 6237 | return getCouldNotCompute(); |
| 6238 | |
| 6239 | // If this is an affine expression, the execution count of this branch is |
| 6240 | // the minimum unsigned root of the following equation: |
| 6241 | // |
| 6242 | // Start + Step*N = 0 (mod 2^BW) |
| 6243 | // |
| 6244 | // equivalent to: |
| 6245 | // |
| 6246 | // Step*N = -Start (mod 2^BW) |
| 6247 | // |
| 6248 | // where BW is the common bit width of Start and Step. |
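// A concrete instance (illustrative): for {-8,+,2} the equation is
// 2*N = 8 (mod 2^BW), whose minimum unsigned solution is N = 4.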
| 6249 | |
| 6250 | // Get the initial value for the loop. |
| 6251 | const SCEV *Start = getSCEVAtScope(AddRec->getStart(), L->getParentLoop()); |
| 6252 | const SCEV *Step = getSCEVAtScope(AddRec->getOperand(1), L->getParentLoop()); |
| 6253 | |
| 6254 | // For now we handle only constant steps. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6255 | // |
| 6256 | // TODO: Handle a nonconstant Step given AddRec<NUW>. If the |
| 6257 | // AddRec is NUW, then (in an unsigned sense) it cannot be counting up to wrap |
| 6258 | // to 0, it must be counting down to equal 0. Consequently, N = Start / -Step. |
| 6259 | // We have not yet seen any such cases. |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6260 | const SCEVConstant *StepC = dyn_cast<SCEVConstant>(Step); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 6261 | if (!StepC || StepC->getValue()->equalsInt(0)) |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6262 | return getCouldNotCompute(); |
| 6263 | |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6264 | // For positive steps (counting up until unsigned overflow): |
| 6265 | // N = -Start/Step (as unsigned) |
| 6266 | // For negative steps (counting down to zero): |
| 6267 | // N = Start/-Step |
| 6268 | // First compute the unsigned distance from zero in the direction of Step. |
Andrew Trick | f1781db | 2011-03-14 17:28:02 +0000 | [diff] [blame] | 6269 | bool CountDown = StepC->getValue()->getValue().isNegative(); |
| 6270 | const SCEV *Distance = CountDown ? Start : getNegativeSCEV(Start); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6271 | |
| 6272 | // Handle unitary steps, which cannot wraparound. |
Andrew Trick | f1781db | 2011-03-14 17:28:02 +0000 | [diff] [blame] | 6273 | // 1*N = -Start; -1*N = Start (mod 2^BW), so: |
| 6274 | // N = Distance (as unsigned) |
Nick Lewycky | 3155552 | 2011-10-03 07:10:45 +0000 | [diff] [blame] | 6275 | if (StepC->getValue()->equalsInt(1) || StepC->getValue()->isAllOnesValue()) { |
| 6276 | ConstantRange CR = getUnsignedRange(Start); |
| 6277 | const SCEV *MaxBECount; |
| 6278 | if (!CountDown && CR.getUnsignedMin().isMinValue()) |
| 6279 | // When counting up, the worst starting value is 1, not 0. |
| 6280 | MaxBECount = CR.getUnsignedMax().isMinValue() |
| 6281 | ? getConstant(APInt::getMinValue(CR.getBitWidth())) |
| 6282 | : getConstant(APInt::getMaxValue(CR.getBitWidth())); |
| 6283 | else |
| 6284 | MaxBECount = getConstant(CountDown ? CR.getUnsignedMax() |
| 6285 | : -CR.getUnsignedMin()); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6286 | return ExitLimit(Distance, MaxBECount); |
Nick Lewycky | 3155552 | 2011-10-03 07:10:45 +0000 | [diff] [blame] | 6287 | } |
Andrew Trick | 2a3b716 | 2011-03-09 17:23:39 +0000 | [diff] [blame] | 6288 | |
Mark Heffernan | acbed5e | 2014-12-15 21:19:53 +0000 | [diff] [blame] | 6289 | // As a special case, handle the instance where Step is a positive power of |
| 6290 | // two. In this case, determining whether Step divides Distance evenly can be |
| 6291 | // done by counting and comparing the number of trailing zeros of Step and |
| 6292 | // Distance. |
| 6293 | if (!CountDown) { |
| 6294 | const APInt &StepV = StepC->getValue()->getValue(); |
| 6295 | // StepV.isPowerOf2() returns true if StepV is a positive power of two. It
| 6296 | // also returns true if StepV is maximally negative (e.g., INT_MIN), but that
| 6297 | // case is not handled as this code is guarded by !CountDown. |
| 6298 | if (StepV.isPowerOf2() && |
Sanjoy Das | f3132d3 | 2015-09-10 05:27:38 +0000 | [diff] [blame] | 6299 | GetMinTrailingZeros(Distance) >= StepV.countTrailingZeros()) { |
| 6300 | // Here we've constrained the equation to be of the form |
| 6301 | // |
| 6302 | // 2^(N + k) * Distance' = StepV * X = 2^N * X (mod 2^W) ... (0)
| 6303 | // |
| 6304 | // where we're operating on a W bit wide integer domain and k is |
| 6305 | // non-negative. The smallest unsigned solution for X is the trip count. |
| 6306 | // |
| 6307 | // (0) is equivalent to: |
| 6308 | // |
| 6309 | // 2^(N + k) * Distance' - 2^N * X = L * 2^W |
| 6310 | // <=> 2^N(2^k * Distance' - X) = L * 2^(W - N) * 2^N |
| 6311 | // <=> 2^k * Distance' - X = L * 2^(W - N) |
| 6312 | // <=> 2^k * Distance' = L * 2^(W - N) + X ... (1) |
| 6313 | // |
| 6314 | // The smallest X satisfying (1) is unsigned remainder of dividing the LHS |
| 6315 | // by 2^(W - N). |
| 6316 | // |
| 6317 | // <=> X = 2^k * Distance' URem 2^(W - N) ... (2) |
| 6318 | // |
| 6319 | // E.g. say we're solving |
| 6320 | // |
| 6321 | // 2 * Val = 2 * X (in i8) ... (3) |
| 6322 | // |
| 6323 | // then from (2), we get X = Val URem i8 128 (k = 0 in this case). |
| 6324 | // |
| 6325 | // Note: It is tempting to solve (3) by setting X = Val, but Val is not |
| 6326 | // necessarily the smallest unsigned value of X that satisfies (3). |
| 6327 | // E.g. if Val is i8 -127 then the smallest value of X that satisfies (3) |
| 6328 | // is i8 1, not i8 -127.
| 6329 | |
| 6330 | const auto *ModuloResult = getUDivExactExpr(Distance, Step); |
| 6331 | |
| 6332 | // Since SCEV does not have a URem node, we construct one using a truncate |
| 6333 | // and a zero extend. |
| 6334 | |
| 6335 | unsigned NarrowWidth = StepV.getBitWidth() - StepV.countTrailingZeros(); |
| 6336 | auto *NarrowTy = IntegerType::get(getContext(), NarrowWidth); |
| 6337 | auto *WideTy = Distance->getType(); |
| 6338 | |
| 6339 | return getZeroExtendExpr(getTruncateExpr(ModuloResult, NarrowTy), WideTy); |
| 6340 | } |
Mark Heffernan | acbed5e | 2014-12-15 21:19:53 +0000 | [diff] [blame] | 6341 | } |
Benjamin Kramer | e75eaca | 2014-03-25 16:25:12 +0000 | [diff] [blame] | 6342 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6343 | // If the condition controls loop exit (the loop exits only if the expression |
| 6344 | // is true) and the addition is no-wrap, we can use unsigned divide to
| 6345 | // compute the backedge count. In this case, the step may not divide the |
| 6346 | // distance, but we don't care because if the condition is "missed" the loop |
| 6347 | // will have undefined behavior due to wrapping. |
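| | // For example (sketch): with Distance == 10 and Step == 3 we would report |
| | // 10 /u 3 == 3; if the equality test were ever "missed", the recurrence |
| | // would have to wrap, which the no-wrap flag lets us disregard. |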
| 6348 | if (ControlsExit && AddRec->getNoWrapFlags(SCEV::FlagNW)) { |
| 6349 | const SCEV *Exact = |
| 6350 | getUDivExpr(Distance, CountDown ? getNegativeSCEV(Step) : Step); |
| 6351 | return ExitLimit(Exact, Exact); |
| 6352 | } |
Benjamin Kramer | e75eaca | 2014-03-25 16:25:12 +0000 | [diff] [blame] | 6353 | |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 6354 | // Then, try to solve the above equation provided that Start is constant. |
| 6355 | if (const SCEVConstant *StartC = dyn_cast<SCEVConstant>(Start)) |
| 6356 | return SolveLinEquationWithOverflow(StepC->getValue()->getValue(), |
| 6357 | -StartC->getValue()->getValue(), |
| 6358 | *this); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6359 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6360 | } |
| 6361 | |
| 6362 | /// HowFarToNonZero - Return the number of times a backedge checking the |
| 6363 | /// specified value for nonzero will execute. If not computable, return |
Dan Gohman | 4c720c0 | 2009-06-06 14:37:11 +0000 | [diff] [blame] | 6364 | /// CouldNotCompute. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6365 | ScalarEvolution::ExitLimit |
Dan Gohman | ba82034 | 2010-02-24 17:31:30 +0000 | [diff] [blame] | 6366 | ScalarEvolution::HowFarToNonZero(const SCEV *V, const Loop *L) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6367 | // Loops that look like: while (X == 0) are very strange indeed. We don't |
| 6368 | // handle them yet except for the trivial case. This could be expanded in the |
| 6369 | // future as needed. |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6370 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6371 | // If the value is a constant, check to see if it is known to be non-zero |
| 6372 | // already. If so, the backedge will execute zero times. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 6373 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(V)) { |
Nick Lewycky | 5a3db14 | 2008-02-21 09:14:53 +0000 | [diff] [blame] | 6374 | if (!C->getValue()->isNullValue()) |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 6375 | return getConstant(C->getType(), 0); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6376 | return getCouldNotCompute(); // Otherwise it will loop infinitely. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6377 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 6378 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6379 | // We could implement others, but I really doubt anyone writes loops like |
| 6380 | // this, and if they did, they would already be constant folded. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6381 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6382 | } |
| 6383 | |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 6384 | /// getPredecessorWithUniqueSuccessorForBB - Return a predecessor of BB |
| 6385 | /// (which may not be an immediate predecessor) which has exactly one |
| 6386 | /// successor from which BB is reachable, together with that successor, or a |
| 6387 | /// pair of null pointers if no such block is found. |
| 6388 | /// |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 6389 | std::pair<BasicBlock *, BasicBlock *> |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6390 | ScalarEvolution::getPredecessorWithUniqueSuccessorForBB(BasicBlock *BB) { |
Dan Gohman | fa066ef | 2009-04-30 20:48:53 +0000 | [diff] [blame] | 6391 | // If the block has a unique predecessor, then there is no path from the |
| 6392 | // predecessor to the block that does not go through the direct edge |
| 6393 | // from the predecessor to the block. |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 6394 | if (BasicBlock *Pred = BB->getSinglePredecessor()) |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 6395 | return std::make_pair(Pred, BB); |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 6396 | |
| 6397 | // A loop's header is defined to be a block that dominates the loop. |
Dan Gohman | 8c77f1a | 2009-05-18 15:36:09 +0000 | [diff] [blame] | 6398 | // If the header has a unique predecessor outside the loop, it must be |
| 6399 | // a block that has exactly one successor that can reach the loop. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 6400 | if (Loop *L = LI.getLoopFor(BB)) |
Dan Gohman | 75c6b0b | 2010-06-22 23:43:28 +0000 | [diff] [blame] | 6401 | return std::make_pair(L->getLoopPredecessor(), L->getHeader()); |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 6402 | |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 6403 | return std::pair<BasicBlock *, BasicBlock *>(); |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 6404 | } |
| 6405 | |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 6406 | /// HasSameValue - SCEV structural equivalence is usually sufficient for |
| 6407 | /// testing whether two expressions are equal; however, for the purposes of |
| 6408 | /// looking for a condition guarding a loop, it can be useful to be a little |
| 6409 | /// more general, since a front-end may have replicated the controlling |
| 6410 | /// expression. |
| 6411 | /// |
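| | /// For example, a front-end may emit two identical instructions that SCEV |
| | /// cannot model symbolically; each becomes a distinct SCEVUnknown, yet the |
| | /// isIdenticalTo check below lets us treat them as equal as long as they do |
| | /// not read memory. |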
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6412 | static bool HasSameValue(const SCEV *A, const SCEV *B) { |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 6413 | // Quick check to see if they are the same SCEV. |
| 6414 | if (A == B) return true; |
| 6415 | |
| 6416 | // Otherwise, if they're both SCEVUnknown, it's possible that they hold |
| 6417 | // two different instructions with the same value. Check for this case. |
| 6418 | if (const SCEVUnknown *AU = dyn_cast<SCEVUnknown>(A)) |
| 6419 | if (const SCEVUnknown *BU = dyn_cast<SCEVUnknown>(B)) |
| 6420 | if (const Instruction *AI = dyn_cast<Instruction>(AU->getValue())) |
| 6421 | if (const Instruction *BI = dyn_cast<Instruction>(BU->getValue())) |
Dan Gohman | 2d08556 | 2009-08-25 17:56:57 +0000 | [diff] [blame] | 6422 | if (AI->isIdenticalTo(BI) && !AI->mayReadFromMemory()) |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 6423 | return true; |
| 6424 | |
| 6425 | // Otherwise assume they may have a different value. |
| 6426 | return false; |
| 6427 | } |
| 6428 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6429 | /// SimplifyICmpOperands - Simplify LHS and RHS in a comparison with |
Sylvestre Ledru | 91ce36c | 2012-09-27 10:14:43 +0000 | [diff] [blame] | 6430 | /// predicate Pred. Return true iff any changes were made. |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6431 | /// |
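| | /// For example, a constant left-hand side is swapped to the right, and |
| | /// 'X u<= 5' is rewritten as 'X u< 6' (illustrative cases; see the body for |
| | /// the full set of rewrites). |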
| 6432 | bool ScalarEvolution::SimplifyICmpOperands(ICmpInst::Predicate &Pred, |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 6433 | const SCEV *&LHS, const SCEV *&RHS, |
| 6434 | unsigned Depth) { |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6435 | bool Changed = false; |
| 6436 | |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 6437 | // If we hit the max recursion limit bail out. |
| 6438 | if (Depth >= 3) |
| 6439 | return false; |
| 6440 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6441 | // Canonicalize a constant to the right side. |
| 6442 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(LHS)) { |
| 6443 | // Check for both operands constant. |
| 6444 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) { |
| 6445 | if (ConstantExpr::getICmp(Pred, |
| 6446 | LHSC->getValue(), |
| 6447 | RHSC->getValue())->isNullValue()) |
| 6448 | goto trivially_false; |
| 6449 | else |
| 6450 | goto trivially_true; |
| 6451 | } |
| 6452 | // Otherwise swap the operands to put the constant on the right. |
| 6453 | std::swap(LHS, RHS); |
| 6454 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 6455 | Changed = true; |
| 6456 | } |
| 6457 | |
| 6458 | // If we're comparing an addrec with a value which is loop-invariant in the |
Dan Gohman | df564ca | 2010-05-03 17:00:11 +0000 | [diff] [blame] | 6459 | // addrec's loop, put the addrec on the left. Also make a dominance check, |
| 6460 | // as both operands could be addrecs loop-invariant in each other's loop. |
| 6461 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(RHS)) { |
| 6462 | const Loop *L = AR->getLoop(); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 6463 | if (isLoopInvariant(LHS, L) && properlyDominates(LHS, L->getHeader())) { |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6464 | std::swap(LHS, RHS); |
| 6465 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 6466 | Changed = true; |
| 6467 | } |
Dan Gohman | df564ca | 2010-05-03 17:00:11 +0000 | [diff] [blame] | 6468 | } |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6469 | |
| 6470 | // If there's a constant operand, canonicalize comparisons with boundary |
| 6471 | // cases, and canonicalize *-or-equal comparisons to regular comparisons. |
| 6472 | if (const SCEVConstant *RC = dyn_cast<SCEVConstant>(RHS)) { |
| 6473 | const APInt &RA = RC->getValue()->getValue(); |
| 6474 | switch (Pred) { |
| 6475 | default: llvm_unreachable("Unexpected ICmpInst::Predicate value!"); |
| 6476 | case ICmpInst::ICMP_EQ: |
| 6477 | case ICmpInst::ICMP_NE: |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 6478 | // Fold ((-1) * %a) + %b == 0 (equivalent to %b-%a == 0) into %a == %b. |
| 6479 | if (!RA) |
| 6480 | if (const SCEVAddExpr *AE = dyn_cast<SCEVAddExpr>(LHS)) |
| 6481 | if (const SCEVMulExpr *ME = dyn_cast<SCEVMulExpr>(AE->getOperand(0))) |
Benjamin Kramer | 406a2db | 2012-05-30 18:42:43 +0000 | [diff] [blame] | 6482 | if (AE->getNumOperands() == 2 && ME->getNumOperands() == 2 && |
| 6483 | ME->getOperand(0)->isAllOnesValue()) { |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 6484 | RHS = AE->getOperand(1); |
| 6485 | LHS = ME->getOperand(1); |
| 6486 | Changed = true; |
| 6487 | } |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6488 | break; |
| 6489 | case ICmpInst::ICMP_UGE: |
| 6490 | if ((RA - 1).isMinValue()) { |
| 6491 | Pred = ICmpInst::ICMP_NE; |
| 6492 | RHS = getConstant(RA - 1); |
| 6493 | Changed = true; |
| 6494 | break; |
| 6495 | } |
| 6496 | if (RA.isMaxValue()) { |
| 6497 | Pred = ICmpInst::ICMP_EQ; |
| 6498 | Changed = true; |
| 6499 | break; |
| 6500 | } |
| 6501 | if (RA.isMinValue()) goto trivially_true; |
| 6502 | |
| 6503 | Pred = ICmpInst::ICMP_UGT; |
| 6504 | RHS = getConstant(RA - 1); |
| 6505 | Changed = true; |
| 6506 | break; |
| 6507 | case ICmpInst::ICMP_ULE: |
| 6508 | if ((RA + 1).isMaxValue()) { |
| 6509 | Pred = ICmpInst::ICMP_NE; |
| 6510 | RHS = getConstant(RA + 1); |
| 6511 | Changed = true; |
| 6512 | break; |
| 6513 | } |
| 6514 | if (RA.isMinValue()) { |
| 6515 | Pred = ICmpInst::ICMP_EQ; |
| 6516 | Changed = true; |
| 6517 | break; |
| 6518 | } |
| 6519 | if (RA.isMaxValue()) goto trivially_true; |
| 6520 | |
| 6521 | Pred = ICmpInst::ICMP_ULT; |
| 6522 | RHS = getConstant(RA + 1); |
| 6523 | Changed = true; |
| 6524 | break; |
| 6525 | case ICmpInst::ICMP_SGE: |
| 6526 | if ((RA - 1).isMinSignedValue()) { |
| 6527 | Pred = ICmpInst::ICMP_NE; |
| 6528 | RHS = getConstant(RA - 1); |
| 6529 | Changed = true; |
| 6530 | break; |
| 6531 | } |
| 6532 | if (RA.isMaxSignedValue()) { |
| 6533 | Pred = ICmpInst::ICMP_EQ; |
| 6534 | Changed = true; |
| 6535 | break; |
| 6536 | } |
| 6537 | if (RA.isMinSignedValue()) goto trivially_true; |
| 6538 | |
| 6539 | Pred = ICmpInst::ICMP_SGT; |
| 6540 | RHS = getConstant(RA - 1); |
| 6541 | Changed = true; |
| 6542 | break; |
| 6543 | case ICmpInst::ICMP_SLE: |
| 6544 | if ((RA + 1).isMaxSignedValue()) { |
| 6545 | Pred = ICmpInst::ICMP_NE; |
| 6546 | RHS = getConstant(RA + 1); |
| 6547 | Changed = true; |
| 6548 | break; |
| 6549 | } |
| 6550 | if (RA.isMinSignedValue()) { |
| 6551 | Pred = ICmpInst::ICMP_EQ; |
| 6552 | Changed = true; |
| 6553 | break; |
| 6554 | } |
| 6555 | if (RA.isMaxSignedValue()) goto trivially_true; |
| 6556 | |
| 6557 | Pred = ICmpInst::ICMP_SLT; |
| 6558 | RHS = getConstant(RA + 1); |
| 6559 | Changed = true; |
| 6560 | break; |
| 6561 | case ICmpInst::ICMP_UGT: |
| 6562 | if (RA.isMinValue()) { |
| 6563 | Pred = ICmpInst::ICMP_NE; |
| 6564 | Changed = true; |
| 6565 | break; |
| 6566 | } |
| 6567 | if ((RA + 1).isMaxValue()) { |
| 6568 | Pred = ICmpInst::ICMP_EQ; |
| 6569 | RHS = getConstant(RA + 1); |
| 6570 | Changed = true; |
| 6571 | break; |
| 6572 | } |
| 6573 | if (RA.isMaxValue()) goto trivially_false; |
| 6574 | break; |
| 6575 | case ICmpInst::ICMP_ULT: |
| 6576 | if (RA.isMaxValue()) { |
| 6577 | Pred = ICmpInst::ICMP_NE; |
| 6578 | Changed = true; |
| 6579 | break; |
| 6580 | } |
| 6581 | if ((RA - 1).isMinValue()) { |
| 6582 | Pred = ICmpInst::ICMP_EQ; |
| 6583 | RHS = getConstant(RA - 1); |
| 6584 | Changed = true; |
| 6585 | break; |
| 6586 | } |
| 6587 | if (RA.isMinValue()) goto trivially_false; |
| 6588 | break; |
| 6589 | case ICmpInst::ICMP_SGT: |
| 6590 | if (RA.isMinSignedValue()) { |
| 6591 | Pred = ICmpInst::ICMP_NE; |
| 6592 | Changed = true; |
| 6593 | break; |
| 6594 | } |
| 6595 | if ((RA + 1).isMaxSignedValue()) { |
| 6596 | Pred = ICmpInst::ICMP_EQ; |
| 6597 | RHS = getConstant(RA + 1); |
| 6598 | Changed = true; |
| 6599 | break; |
| 6600 | } |
| 6601 | if (RA.isMaxSignedValue()) goto trivially_false; |
| 6602 | break; |
| 6603 | case ICmpInst::ICMP_SLT: |
| 6604 | if (RA.isMaxSignedValue()) { |
| 6605 | Pred = ICmpInst::ICMP_NE; |
| 6606 | Changed = true; |
| 6607 | break; |
| 6608 | } |
| 6609 | if ((RA - 1).isMinSignedValue()) { |
| 6610 | Pred = ICmpInst::ICMP_EQ; |
| 6611 | RHS = getConstant(RA - 1); |
| 6612 | Changed = true; |
| 6613 | break; |
| 6614 | } |
| 6615 | if (RA.isMinSignedValue()) goto trivially_false; |
| 6616 | break; |
| 6617 | } |
| 6618 | } |
| 6619 | |
| 6620 | // Check for obvious equality. |
| 6621 | if (HasSameValue(LHS, RHS)) { |
| 6622 | if (ICmpInst::isTrueWhenEqual(Pred)) |
| 6623 | goto trivially_true; |
| 6624 | if (ICmpInst::isFalseWhenEqual(Pred)) |
| 6625 | goto trivially_false; |
| 6626 | } |
| 6627 | |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6628 | // If possible, canonicalize GE/LE comparisons to GT/LT comparisons, by |
| 6629 | // adding or subtracting 1 from one of the operands. |
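| | // E.g. 'X s<= Y' becomes 'X s< Y + 1' when the signed range of Y shows that |
| | // the increment cannot overflow (and symmetrically for the other predicates). |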
| 6630 | switch (Pred) { |
| 6631 | case ICmpInst::ICMP_SLE: |
| 6632 | if (!getSignedRange(RHS).getSignedMax().isMaxSignedValue()) { |
| 6633 | RHS = getAddExpr(getConstant(RHS->getType(), 1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6634 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6635 | Pred = ICmpInst::ICMP_SLT; |
| 6636 | Changed = true; |
| 6637 | } else if (!getSignedRange(LHS).getSignedMin().isMinSignedValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6638 | LHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6639 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6640 | Pred = ICmpInst::ICMP_SLT; |
| 6641 | Changed = true; |
| 6642 | } |
| 6643 | break; |
| 6644 | case ICmpInst::ICMP_SGE: |
| 6645 | if (!getSignedRange(RHS).getSignedMin().isMinSignedValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6646 | RHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6647 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6648 | Pred = ICmpInst::ICMP_SGT; |
| 6649 | Changed = true; |
| 6650 | } else if (!getSignedRange(LHS).getSignedMax().isMaxSignedValue()) { |
| 6651 | LHS = getAddExpr(getConstant(RHS->getType(), 1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6652 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6653 | Pred = ICmpInst::ICMP_SGT; |
| 6654 | Changed = true; |
| 6655 | } |
| 6656 | break; |
| 6657 | case ICmpInst::ICMP_ULE: |
| 6658 | if (!getUnsignedRange(RHS).getUnsignedMax().isMaxValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6659 | RHS = getAddExpr(getConstant(RHS->getType(), 1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6660 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6661 | Pred = ICmpInst::ICMP_ULT; |
| 6662 | Changed = true; |
| 6663 | } else if (!getUnsignedRange(LHS).getUnsignedMin().isMinValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6664 | LHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6665 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6666 | Pred = ICmpInst::ICMP_ULT; |
| 6667 | Changed = true; |
| 6668 | } |
| 6669 | break; |
| 6670 | case ICmpInst::ICMP_UGE: |
| 6671 | if (!getUnsignedRange(RHS).getUnsignedMin().isMinValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6672 | RHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6673 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6674 | Pred = ICmpInst::ICMP_UGT; |
| 6675 | Changed = true; |
| 6676 | } else if (!getUnsignedRange(LHS).getUnsignedMax().isMaxValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 6677 | LHS = getAddExpr(getConstant(RHS->getType(), 1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 6678 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 6679 | Pred = ICmpInst::ICMP_UGT; |
| 6680 | Changed = true; |
| 6681 | } |
| 6682 | break; |
| 6683 | default: |
| 6684 | break; |
| 6685 | } |
| 6686 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6687 | // TODO: More simplifications are possible here. |
| 6688 | |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 6689 | // Recursively simplify until we either hit a recursion limit or nothing |
| 6690 | // changes. |
| 6691 | if (Changed) |
| 6692 | return SimplifyICmpOperands(Pred, LHS, RHS, Depth+1); |
| 6693 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6694 | return Changed; |
| 6695 | |
| 6696 | trivially_true: |
| 6697 | // Return 0 == 0. |
Benjamin Kramer | ddd1b7b | 2010-11-20 18:43:35 +0000 | [diff] [blame] | 6698 | LHS = RHS = getConstant(ConstantInt::getFalse(getContext())); |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6699 | Pred = ICmpInst::ICMP_EQ; |
| 6700 | return true; |
| 6701 | |
| 6702 | trivially_false: |
| 6703 | // Return 0 != 0. |
Benjamin Kramer | ddd1b7b | 2010-11-20 18:43:35 +0000 | [diff] [blame] | 6704 | LHS = RHS = getConstant(ConstantInt::getFalse(getContext())); |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 6705 | Pred = ICmpInst::ICMP_NE; |
| 6706 | return true; |
| 6707 | } |
| 6708 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6709 | bool ScalarEvolution::isKnownNegative(const SCEV *S) { |
| 6710 | return getSignedRange(S).getSignedMax().isNegative(); |
| 6711 | } |
| 6712 | |
| 6713 | bool ScalarEvolution::isKnownPositive(const SCEV *S) { |
| 6714 | return getSignedRange(S).getSignedMin().isStrictlyPositive(); |
| 6715 | } |
| 6716 | |
| 6717 | bool ScalarEvolution::isKnownNonNegative(const SCEV *S) { |
| 6718 | return !getSignedRange(S).getSignedMin().isNegative(); |
| 6719 | } |
| 6720 | |
| 6721 | bool ScalarEvolution::isKnownNonPositive(const SCEV *S) { |
| 6722 | return !getSignedRange(S).getSignedMax().isStrictlyPositive(); |
| 6723 | } |
| 6724 | |
| 6725 | bool ScalarEvolution::isKnownNonZero(const SCEV *S) { |
| 6726 | return isKnownNegative(S) || isKnownPositive(S); |
| 6727 | } |
| 6728 | |
| 6729 | bool ScalarEvolution::isKnownPredicate(ICmpInst::Predicate Pred, |
| 6730 | const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | 36cce7e | 2010-04-24 01:38:36 +0000 | [diff] [blame] | 6731 | // Canonicalize the inputs first. |
| 6732 | (void)SimplifyICmpOperands(Pred, LHS, RHS); |
| 6733 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 6734 | // If LHS or RHS is an addrec, check to see if the condition is true in |
| 6735 | // every iteration of the loop. |
Justin Bogner | cbb8438 | 2014-05-23 00:06:56 +0000 | [diff] [blame] | 6736 | // If LHS and RHS are both addrec, both conditions must be true in |
| 6737 | // every iteration of the loop. |
| 6738 | const SCEVAddRecExpr *LAR = dyn_cast<SCEVAddRecExpr>(LHS); |
| 6739 | const SCEVAddRecExpr *RAR = dyn_cast<SCEVAddRecExpr>(RHS); |
| 6740 | bool LeftGuarded = false; |
| 6741 | bool RightGuarded = false; |
| 6742 | if (LAR) { |
| 6743 | const Loop *L = LAR->getLoop(); |
| 6744 | if (isLoopEntryGuardedByCond(L, Pred, LAR->getStart(), RHS) && |
| 6745 | isLoopBackedgeGuardedByCond(L, Pred, LAR->getPostIncExpr(*this), RHS)) { |
| 6746 | if (!RAR) return true; |
| 6747 | LeftGuarded = true; |
| 6748 | } |
| 6749 | } |
| 6750 | if (RAR) { |
| 6751 | const Loop *L = RAR->getLoop(); |
| 6752 | if (isLoopEntryGuardedByCond(L, Pred, LHS, RAR->getStart()) && |
| 6753 | isLoopBackedgeGuardedByCond(L, Pred, LHS, RAR->getPostIncExpr(*this))) { |
| 6754 | if (!LAR) return true; |
| 6755 | RightGuarded = true; |
| 6756 | } |
| 6757 | } |
| 6758 | if (LeftGuarded && RightGuarded) |
| 6759 | return true; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6760 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 6761 | // Otherwise see what can be done with known constant ranges. |
| 6762 | return isKnownPredicateWithRanges(Pred, LHS, RHS); |
| 6763 | } |
| 6764 | |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6765 | bool ScalarEvolution::isMonotonicPredicate(const SCEVAddRecExpr *LHS, |
| 6766 | ICmpInst::Predicate Pred, |
| 6767 | bool &Increasing) { |
| 6768 | bool Result = isMonotonicPredicateImpl(LHS, Pred, Increasing); |
| 6769 | |
| 6770 | #ifndef NDEBUG |
| 6771 | // Verify an invariant: swapping the predicate should turn a monotonically |
| 6772 | // increasing predicate into a monotonically decreasing one, and vice versa. |
| 6773 | bool IncreasingSwapped; |
| 6774 | bool ResultSwapped = isMonotonicPredicateImpl( |
| 6775 | LHS, ICmpInst::getSwappedPredicate(Pred), IncreasingSwapped); |
| 6776 | |
| 6777 | assert(Result == ResultSwapped && "should be able to analyze both!"); |
| 6778 | if (ResultSwapped) |
| 6779 | assert(Increasing == !IncreasingSwapped && |
| 6780 | "monotonicity should flip as we flip the predicate"); |
| 6781 | #endif |
| 6782 | |
| 6783 | return Result; |
| 6784 | } |
| 6785 | |
| 6786 | bool ScalarEvolution::isMonotonicPredicateImpl(const SCEVAddRecExpr *LHS, |
| 6787 | ICmpInst::Predicate Pred, |
| 6788 | bool &Increasing) { |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6789 | |
| 6790 | // A zero step value for LHS means the induction variable is essentially a |
| 6791 | // loop invariant value. We don't really depend on the predicate actually |
| 6792 | // flipping from false to true (for increasing predicates, and the other way |
| 6793 | // around for decreasing predicates); all we care about is that *if* the |
| 6794 | // predicate changes, then it changes only from false to true. |
| 6795 | // |
| 6796 | // A zero step value in itself is not very useful, but there may be places |
| 6797 | // where SCEV can prove X >= 0 but not prove X > 0, so it is helpful to be |
| 6798 | // as general as possible. |
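| | // For example (sketch): for a NUW addrec {X,+,1}<nuw> and the predicate u>=, |
| | // the comparison against a fixed RHS can only change from false to true as |
| | // the loop iterates, so it is reported as monotonically increasing. |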
| 6799 | |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 6800 | switch (Pred) { |
| 6801 | default: |
| 6802 | return false; // Conservative answer |
| 6803 | |
| 6804 | case ICmpInst::ICMP_UGT: |
| 6805 | case ICmpInst::ICMP_UGE: |
| 6806 | case ICmpInst::ICMP_ULT: |
| 6807 | case ICmpInst::ICMP_ULE: |
| 6808 | if (!LHS->getNoWrapFlags(SCEV::FlagNUW)) |
| 6809 | return false; |
| 6810 | |
| 6811 | Increasing = Pred == ICmpInst::ICMP_UGT || Pred == ICmpInst::ICMP_UGE; |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6812 | return true; |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 6813 | |
| 6814 | case ICmpInst::ICMP_SGT: |
| 6815 | case ICmpInst::ICMP_SGE: |
| 6816 | case ICmpInst::ICMP_SLT: |
| 6817 | case ICmpInst::ICMP_SLE: { |
| 6818 | if (!LHS->getNoWrapFlags(SCEV::FlagNSW)) |
| 6819 | return false; |
| 6820 | |
| 6821 | const SCEV *Step = LHS->getStepRecurrence(*this); |
| 6822 | |
| 6823 | if (isKnownNonNegative(Step)) { |
| 6824 | Increasing = Pred == ICmpInst::ICMP_SGT || Pred == ICmpInst::ICMP_SGE; |
| 6825 | return true; |
| 6826 | } |
| 6827 | |
| 6828 | if (isKnownNonPositive(Step)) { |
| 6829 | Increasing = Pred == ICmpInst::ICMP_SLT || Pred == ICmpInst::ICMP_SLE; |
| 6830 | return true; |
| 6831 | } |
| 6832 | |
| 6833 | return false; |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6834 | } |
| 6835 | |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6836 | } |
| 6837 | |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 6838 | llvm_unreachable("switch has default clause!"); |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 6839 | } |
| 6840 | |
| 6841 | bool ScalarEvolution::isLoopInvariantPredicate( |
| 6842 | ICmpInst::Predicate Pred, const SCEV *LHS, const SCEV *RHS, const Loop *L, |
| 6843 | ICmpInst::Predicate &InvariantPred, const SCEV *&InvariantLHS, |
| 6844 | const SCEV *&InvariantRHS) { |
| 6845 | |
| 6846 | // If there is a loop-invariant operand, force it into the RHS; otherwise bail out. |
| 6847 | if (!isLoopInvariant(RHS, L)) { |
| 6848 | if (!isLoopInvariant(LHS, L)) |
| 6849 | return false; |
| 6850 | |
| 6851 | std::swap(LHS, RHS); |
| 6852 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 6853 | } |
| 6854 | |
| 6855 | const SCEVAddRecExpr *ArLHS = dyn_cast<SCEVAddRecExpr>(LHS); |
| 6856 | if (!ArLHS || ArLHS->getLoop() != L) |
| 6857 | return false; |
| 6858 | |
| 6859 | bool Increasing; |
| 6860 | if (!isMonotonicPredicate(ArLHS, Pred, Increasing)) |
| 6861 | return false; |
| 6862 | |
| 6863 | // If the predicate "ArLHS `Pred` RHS" monotonically increases from false to |
| 6864 | // true as the loop iterates, and the backedge is control dependent on |
| 6865 | // "ArLHS `Pred` RHS" == true, then we can reason as follows: |
| 6866 | // |
| 6867 | // * if the predicate was false in the first iteration then the predicate |
| 6868 | // is never evaluated again, since the loop exits without taking the |
| 6869 | // backedge. |
| 6870 | // * if the predicate was true in the first iteration then it will |
| 6871 | // continue to be true for all future iterations since it is |
| 6872 | // monotonically increasing. |
| 6873 | // |
| 6874 | // For both the above possibilities, we can replace the loop varying |
| 6875 | // predicate with its value on the first iteration of the loop (which is |
| 6876 | // loop invariant). |
| 6877 | // |
| 6878 | // A similar reasoning applies for a monotonically decreasing predicate, by |
| 6879 | // replacing true with false and false with true in the above two bullets. |
| 6880 | |
| 6881 | auto P = Increasing ? Pred : ICmpInst::getInversePredicate(Pred); |
| 6882 | |
| 6883 | if (!isLoopBackedgeGuardedByCond(L, P, LHS, RHS)) |
| 6884 | return false; |
| 6885 | |
| 6886 | InvariantPred = Pred; |
| 6887 | InvariantLHS = ArLHS->getStart(); |
| 6888 | InvariantRHS = RHS; |
| 6889 | return true; |
| 6890 | } |
| 6891 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 6892 | bool |
| 6893 | ScalarEvolution::isKnownPredicateWithRanges(ICmpInst::Predicate Pred, |
| 6894 | const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6895 | if (HasSameValue(LHS, RHS)) |
| 6896 | return ICmpInst::isTrueWhenEqual(Pred); |
| 6897 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 6898 | // This code is split out from isKnownPredicate because it is called from |
| 6899 | // within isLoopEntryGuardedByCond. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6900 | switch (Pred) { |
| 6901 | default: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 6902 | llvm_unreachable("Unexpected ICmpInst::Predicate value!"); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6903 | case ICmpInst::ICMP_SGT: |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6904 | std::swap(LHS, RHS); |
| 6905 | case ICmpInst::ICMP_SLT: { |
| 6906 | ConstantRange LHSRange = getSignedRange(LHS); |
| 6907 | ConstantRange RHSRange = getSignedRange(RHS); |
| 6908 | if (LHSRange.getSignedMax().slt(RHSRange.getSignedMin())) |
| 6909 | return true; |
| 6910 | if (LHSRange.getSignedMin().sge(RHSRange.getSignedMax())) |
| 6911 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6912 | break; |
| 6913 | } |
| 6914 | case ICmpInst::ICMP_SGE: |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6915 | std::swap(LHS, RHS); |
| 6916 | case ICmpInst::ICMP_SLE: { |
| 6917 | ConstantRange LHSRange = getSignedRange(LHS); |
| 6918 | ConstantRange RHSRange = getSignedRange(RHS); |
| 6919 | if (LHSRange.getSignedMax().sle(RHSRange.getSignedMin())) |
| 6920 | return true; |
| 6921 | if (LHSRange.getSignedMin().sgt(RHSRange.getSignedMax())) |
| 6922 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6923 | break; |
| 6924 | } |
| 6925 | case ICmpInst::ICMP_UGT: |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6926 | std::swap(LHS, RHS); |
| 6927 | case ICmpInst::ICMP_ULT: { |
| 6928 | ConstantRange LHSRange = getUnsignedRange(LHS); |
| 6929 | ConstantRange RHSRange = getUnsignedRange(RHS); |
| 6930 | if (LHSRange.getUnsignedMax().ult(RHSRange.getUnsignedMin())) |
| 6931 | return true; |
| 6932 | if (LHSRange.getUnsignedMin().uge(RHSRange.getUnsignedMax())) |
| 6933 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6934 | break; |
| 6935 | } |
| 6936 | case ICmpInst::ICMP_UGE: |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6937 | std::swap(LHS, RHS); |
| 6938 | case ICmpInst::ICMP_ULE: { |
| 6939 | ConstantRange LHSRange = getUnsignedRange(LHS); |
| 6940 | ConstantRange RHSRange = getUnsignedRange(RHS); |
| 6941 | if (LHSRange.getUnsignedMax().ule(RHSRange.getUnsignedMin())) |
| 6942 | return true; |
| 6943 | if (LHSRange.getUnsignedMin().ugt(RHSRange.getUnsignedMax())) |
| 6944 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6945 | break; |
| 6946 | } |
| 6947 | case ICmpInst::ICMP_NE: { |
| 6948 | if (getUnsignedRange(LHS).intersectWith(getUnsignedRange(RHS)).isEmptySet()) |
| 6949 | return true; |
| 6950 | if (getSignedRange(LHS).intersectWith(getSignedRange(RHS)).isEmptySet()) |
| 6951 | return true; |
| 6952 | |
| 6953 | const SCEV *Diff = getMinusSCEV(LHS, RHS); |
| 6954 | if (isKnownNonZero(Diff)) |
| 6955 | return true; |
| 6956 | break; |
| 6957 | } |
| 6958 | case ICmpInst::ICMP_EQ: |
Dan Gohman | 3439262 | 2009-07-20 23:54:43 +0000 | [diff] [blame] | 6959 | // The check at the top of the function catches the case where |
| 6960 | // the values are known to be equal. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6961 | break; |
| 6962 | } |
| 6963 | return false; |
| 6964 | } |
| 6965 | |
| 6966 | /// isLoopBackedgeGuardedByCond - Test whether the backedge of the loop is |
| 6967 | /// protected by a conditional between LHS and RHS. This is used to |
| 6968 | /// eliminate casts. |
| 6969 | bool |
| 6970 | ScalarEvolution::isLoopBackedgeGuardedByCond(const Loop *L, |
| 6971 | ICmpInst::Predicate Pred, |
| 6972 | const SCEV *LHS, const SCEV *RHS) { |
| 6973 | // Interpret a null as meaning no loop, where there is obviously no guard |
| 6974 | // (interprocedural conditions notwithstanding). |
| 6975 | if (!L) return true; |
| 6976 | |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 6977 | if (isKnownPredicateWithRanges(Pred, LHS, RHS)) return true; |
| 6978 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6979 | BasicBlock *Latch = L->getLoopLatch(); |
| 6980 | if (!Latch) |
| 6981 | return false; |
| 6982 | |
| 6983 | BranchInst *LoopContinuePredicate = |
| 6984 | dyn_cast<BranchInst>(Latch->getTerminator()); |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 6985 | if (LoopContinuePredicate && LoopContinuePredicate->isConditional() && |
| 6986 | isImpliedCond(Pred, LHS, RHS, |
| 6987 | LoopContinuePredicate->getCondition(), |
| 6988 | LoopContinuePredicate->getSuccessor(0) != L->getHeader())) |
| 6989 | return true; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 6990 | |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 6991 | struct ClearWalkingBEDominatingCondsOnExit { |
| 6992 | ScalarEvolution &SE; |
| 6993 | |
| 6994 | explicit ClearWalkingBEDominatingCondsOnExit(ScalarEvolution &SE) |
Hans Wennborg | 13958b7 | 2015-07-22 20:46:11 +0000 | [diff] [blame] | 6995 | : SE(SE) {} |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 6996 | |
| 6997 | ~ClearWalkingBEDominatingCondsOnExit() { |
| 6998 | SE.WalkingBEDominatingConds = false; |
| 6999 | } |
| 7000 | }; |
| 7001 | |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 7002 | // We don't want more than one activation of the following loops on the stack |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 7003 | // -- that can lead to O(n!) time complexity. |
| 7004 | if (WalkingBEDominatingConds) |
| 7005 | return false; |
| 7006 | |
| 7007 | WalkingBEDominatingConds = true; |
| 7008 | ClearWalkingBEDominatingCondsOnExit ClearOnExit(*this); |
| 7009 | |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 7010 | // Check conditions due to any @llvm.assume intrinsics. |
| 7011 | for (auto &AssumeVH : AC.assumptions()) { |
| 7012 | if (!AssumeVH) |
| 7013 | continue; |
| 7014 | auto *CI = cast<CallInst>(AssumeVH); |
| 7015 | if (!DT.dominates(CI, Latch->getTerminator())) |
| 7016 | continue; |
| 7017 | |
| 7018 | if (isImpliedCond(Pred, LHS, RHS, CI->getArgOperand(0), false)) |
| 7019 | return true; |
| 7020 | } |
| 7021 | |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 7022 | // If the loop is not reachable from the entry block, we risk running into an |
| 7023 | // infinite loop as we walk up into the dom tree. These loops do not matter |
| 7024 | // anyway, so we just return a conservative answer when we see them. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7025 | if (!DT.isReachableFromEntry(L->getHeader())) |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 7026 | return false; |
| 7027 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7028 | for (DomTreeNode *DTN = DT[Latch], *HeaderDTN = DT[L->getHeader()]; |
| 7029 | DTN != HeaderDTN; DTN = DTN->getIDom()) { |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 7030 | |
| 7031 | assert(DTN && "should reach the loop header before reaching the root!"); |
| 7032 | |
| 7033 | BasicBlock *BB = DTN->getBlock(); |
| 7034 | BasicBlock *PBB = BB->getSinglePredecessor(); |
| 7035 | if (!PBB) |
| 7036 | continue; |
| 7037 | |
| 7038 | BranchInst *ContinuePredicate = dyn_cast<BranchInst>(PBB->getTerminator()); |
| 7039 | if (!ContinuePredicate || !ContinuePredicate->isConditional()) |
| 7040 | continue; |
| 7041 | |
| 7042 | Value *Condition = ContinuePredicate->getCondition(); |
| 7043 | |
| 7044 | // If we have an edge `E` within the loop body that dominates the only |
| 7045 | // latch, the condition guarding `E` also guards the backedge. This |
| 7046 | // reasoning works only for loops with a single latch. |
| 7047 | |
| 7048 | BasicBlockEdge DominatingEdge(PBB, BB); |
| 7049 | if (DominatingEdge.isSingleEdge()) { |
| 7050 | // We're constructively (and conservatively) enumerating edges within the |
| 7051 | // loop body that dominate the latch. The dominator tree better agree |
| 7052 | // with us on this: |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7053 | assert(DT.dominates(DominatingEdge, Latch) && "should be!"); |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 7054 | |
| 7055 | if (isImpliedCond(Pred, LHS, RHS, Condition, |
| 7056 | BB != ContinuePredicate->getSuccessor(0))) |
| 7057 | return true; |
| 7058 | } |
| 7059 | } |
| 7060 | |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 7061 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7062 | } |
| 7063 | |
Dan Gohman | b50349a | 2010-04-11 19:27:13 +0000 | [diff] [blame] | 7064 | /// isLoopEntryGuardedByCond - Test whether entry to the loop is protected |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7065 | /// by a conditional between LHS and RHS. This is used to help avoid max |
| 7066 | /// expressions in loop trip counts, and to eliminate casts. |
| 7067 | bool |
Dan Gohman | b50349a | 2010-04-11 19:27:13 +0000 | [diff] [blame] | 7068 | ScalarEvolution::isLoopEntryGuardedByCond(const Loop *L, |
| 7069 | ICmpInst::Predicate Pred, |
| 7070 | const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | 9cf09f8 | 2009-05-18 16:03:58 +0000 | [diff] [blame] | 7071 | // Interpret a null as meaning no loop, where there is obviously no guard |
| 7072 | // (interprocedural conditions notwithstanding). |
| 7073 | if (!L) return false; |
| 7074 | |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 7075 | if (isKnownPredicateWithRanges(Pred, LHS, RHS)) return true; |
| 7076 | |
Dan Gohman | 8c77f1a | 2009-05-18 15:36:09 +0000 | [diff] [blame] | 7077 | // Starting at the loop predecessor, climb up the predecessor chain, as long |
| 7078 | // as there are predecessors that can be found that have unique successors |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 7079 | // leading to the original header. |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 7080 | for (std::pair<BasicBlock *, BasicBlock *> |
Dan Gohman | 75c6b0b | 2010-06-22 23:43:28 +0000 | [diff] [blame] | 7081 | Pair(L->getLoopPredecessor(), L->getHeader()); |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 7082 | Pair.first; |
| 7083 | Pair = getPredecessorWithUniqueSuccessorForBB(Pair.first)) { |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 7084 | |
| 7085 | BranchInst *LoopEntryPredicate = |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 7086 | dyn_cast<BranchInst>(Pair.first->getTerminator()); |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 7087 | if (!LoopEntryPredicate || |
| 7088 | LoopEntryPredicate->isUnconditional()) |
| 7089 | continue; |
| 7090 | |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7091 | if (isImpliedCond(Pred, LHS, RHS, |
| 7092 | LoopEntryPredicate->getCondition(), |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 7093 | LoopEntryPredicate->getSuccessor(0) != Pair.second)) |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 7094 | return true; |
Nick Lewycky | b5688cc | 2008-07-12 07:41:32 +0000 | [diff] [blame] | 7095 | } |
| 7096 | |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 7097 | // Check conditions due to any @llvm.assume intrinsics. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7098 | for (auto &AssumeVH : AC.assumptions()) { |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 7099 | if (!AssumeVH) |
| 7100 | continue; |
| 7101 | auto *CI = cast<CallInst>(AssumeVH); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7102 | if (!DT.dominates(CI, L->getHeader())) |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 7103 | continue; |
| 7104 | |
| 7105 | if (isImpliedCond(Pred, LHS, RHS, CI->getArgOperand(0), false)) |
| 7106 | return true; |
| 7107 | } |
| 7108 | |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 7109 | return false; |
Nick Lewycky | b5688cc | 2008-07-12 07:41:32 +0000 | [diff] [blame] | 7110 | } |
| 7111 | |
Andrew Trick | 7fa4e0f | 2012-05-19 00:48:25 +0000 | [diff] [blame] | 7112 | /// RAII wrapper to prevent recursive application of isImpliedCond. |
| 7113 | /// ScalarEvolution's PendingLoopPredicates set must be empty unless we are |
| 7114 | /// currently evaluating isImpliedCond. |
| 7115 | struct MarkPendingLoopPredicate { |
| 7116 | Value *Cond; |
| 7117 | DenseSet<Value*> &LoopPreds; |
| 7118 | bool Pending; |
| 7119 | |
| 7120 | MarkPendingLoopPredicate(Value *C, DenseSet<Value*> &LP) |
| 7121 | : Cond(C), LoopPreds(LP) { |
| 7122 | Pending = !LoopPreds.insert(Cond).second; |
| 7123 | } |
| 7124 | ~MarkPendingLoopPredicate() { |
| 7125 | if (!Pending) |
| 7126 | LoopPreds.erase(Cond); |
| 7127 | } |
| 7128 | }; |
| 7129 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7130 | /// isImpliedCond - Test whether the condition described by Pred, LHS, |
| 7131 | /// and RHS is true whenever the given Cond value evaluates to true. |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7132 | bool ScalarEvolution::isImpliedCond(ICmpInst::Predicate Pred, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7133 | const SCEV *LHS, const SCEV *RHS, |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7134 | Value *FoundCondValue, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7135 | bool Inverse) { |
Andrew Trick | 7fa4e0f | 2012-05-19 00:48:25 +0000 | [diff] [blame] | 7136 | MarkPendingLoopPredicate Mark(FoundCondValue, PendingLoopPredicates); |
| 7137 | if (Mark.Pending) |
| 7138 | return false; |
| 7139 | |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 7140 | // Recursively handle And and Or conditions. |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7141 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(FoundCondValue)) { |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 7142 | if (BO->getOpcode() == Instruction::And) { |
| 7143 | if (!Inverse) |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7144 | return isImpliedCond(Pred, LHS, RHS, BO->getOperand(0), Inverse) || |
| 7145 | isImpliedCond(Pred, LHS, RHS, BO->getOperand(1), Inverse); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 7146 | } else if (BO->getOpcode() == Instruction::Or) { |
| 7147 | if (Inverse) |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7148 | return isImpliedCond(Pred, LHS, RHS, BO->getOperand(0), Inverse) || |
| 7149 | isImpliedCond(Pred, LHS, RHS, BO->getOperand(1), Inverse); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 7150 | } |
| 7151 | } |
| 7152 | |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 7153 | ICmpInst *ICI = dyn_cast<ICmpInst>(FoundCondValue); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 7154 | if (!ICI) return false; |
| 7155 | |
Andrew Trick | fa59403 | 2012-11-29 18:35:13 +0000 | [diff] [blame] | 7156 | // We have found a conditional branch that dominates the loop or controls the |
| 7157 | // loop latch. Check to see if it is the comparison we are looking for. |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7158 | ICmpInst::Predicate FoundPred; |
| 7159 | if (Inverse) |
| 7160 | FoundPred = ICI->getInversePredicate(); |
| 7161 | else |
| 7162 | FoundPred = ICI->getPredicate(); |
| 7163 | |
| 7164 | const SCEV *FoundLHS = getSCEV(ICI->getOperand(0)); |
| 7165 | const SCEV *FoundRHS = getSCEV(ICI->getOperand(1)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7166 | |
Sanjoy Das | 1459883 | 2015-03-26 17:28:26 +0000 | [diff] [blame] | 7167 | // Balance the types. |
| 7168 | if (getTypeSizeInBits(LHS->getType()) < |
| 7169 | getTypeSizeInBits(FoundLHS->getType())) { |
| 7170 | if (CmpInst::isSigned(Pred)) { |
| 7171 | LHS = getSignExtendExpr(LHS, FoundLHS->getType()); |
| 7172 | RHS = getSignExtendExpr(RHS, FoundLHS->getType()); |
| 7173 | } else { |
| 7174 | LHS = getZeroExtendExpr(LHS, FoundLHS->getType()); |
| 7175 | RHS = getZeroExtendExpr(RHS, FoundLHS->getType()); |
| 7176 | } |
| 7177 | } else if (getTypeSizeInBits(LHS->getType()) > |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7178 | getTypeSizeInBits(FoundLHS->getType())) { |
Stepan Dyatkovskiy | 431993b | 2014-01-09 12:26:12 +0000 | [diff] [blame] | 7179 | if (CmpInst::isSigned(FoundPred)) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7180 | FoundLHS = getSignExtendExpr(FoundLHS, LHS->getType()); |
| 7181 | FoundRHS = getSignExtendExpr(FoundRHS, LHS->getType()); |
| 7182 | } else { |
| 7183 | FoundLHS = getZeroExtendExpr(FoundLHS, LHS->getType()); |
| 7184 | FoundRHS = getZeroExtendExpr(FoundRHS, LHS->getType()); |
| 7185 | } |
| 7186 | } |
| 7187 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7188 | // Canonicalize the query to match the way instcombine will have |
| 7189 | // canonicalized the comparison. |
Dan Gohman | 3673aa1 | 2010-04-24 01:34:53 +0000 | [diff] [blame] | 7190 | if (SimplifyICmpOperands(Pred, LHS, RHS)) |
| 7191 | if (LHS == RHS) |
Dan Gohman | b5025c7 | 2010-05-03 18:00:24 +0000 | [diff] [blame] | 7192 | return CmpInst::isTrueWhenEqual(Pred); |
Benjamin Kramer | ba11a98 | 2012-11-29 19:07:57 +0000 | [diff] [blame] | 7193 | if (SimplifyICmpOperands(FoundPred, FoundLHS, FoundRHS)) |
| 7194 | if (FoundLHS == FoundRHS) |
| 7195 | return CmpInst::isFalseWhenEqual(FoundPred); |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7196 | |
| 7197 | // Check to see if we can make the LHS or RHS match. |
| 7198 | if (LHS == FoundRHS || RHS == FoundLHS) { |
| 7199 | if (isa<SCEVConstant>(RHS)) { |
| 7200 | std::swap(FoundLHS, FoundRHS); |
| 7201 | FoundPred = ICmpInst::getSwappedPredicate(FoundPred); |
| 7202 | } else { |
| 7203 | std::swap(LHS, RHS); |
| 7204 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 7205 | } |
| 7206 | } |
| 7207 | |
| 7208 | // Check whether the found predicate is the same as the desired predicate. |
| 7209 | if (FoundPred == Pred) |
| 7210 | return isImpliedCondOperands(Pred, LHS, RHS, FoundLHS, FoundRHS); |
| 7211 | |
| 7212 | // Check whether swapping the found predicate makes it the same as the |
| 7213 | // desired predicate. |
| 7214 | if (ICmpInst::getSwappedPredicate(FoundPred) == Pred) { |
| 7215 | if (isa<SCEVConstant>(RHS)) |
| 7216 | return isImpliedCondOperands(Pred, LHS, RHS, FoundRHS, FoundLHS); |
| 7217 | else |
| 7218 | return isImpliedCondOperands(ICmpInst::getSwappedPredicate(Pred), |
| 7219 | RHS, LHS, FoundLHS, FoundRHS); |
| 7220 | } |
| 7221 | |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 7222 | // Check if we can make progress by sharpening ranges. |
| 7223 | if (FoundPred == ICmpInst::ICMP_NE && |
| 7224 | (isa<SCEVConstant>(FoundLHS) || isa<SCEVConstant>(FoundRHS))) { |
| 7225 | |
| 7226 | const SCEVConstant *C = nullptr; |
| 7227 | const SCEV *V = nullptr; |
| 7228 | |
| 7229 | if (isa<SCEVConstant>(FoundLHS)) { |
| 7230 | C = cast<SCEVConstant>(FoundLHS); |
| 7231 | V = FoundRHS; |
| 7232 | } else { |
| 7233 | C = cast<SCEVConstant>(FoundRHS); |
| 7234 | V = FoundLHS; |
| 7235 | } |
| 7236 | |
| 7237 | // The guarding predicate tells us that C != V. If the known range |
| 7238 | // of V is [C, t), we can sharpen the range to [C + 1, t). The |
| 7239 | // range we consider has to correspond to same signedness as the |
| 7240 | // predicate we're interested in folding. |
| 7241 | |
| 7242 | APInt Min = ICmpInst::isSigned(Pred) ? |
| 7243 | getSignedRange(V).getSignedMin() : getUnsignedRange(V).getUnsignedMin(); |
| 7244 | |
| 7245 | if (Min == C->getValue()->getValue()) { |
| 7246 | // Given (V >= Min && V != Min) we conclude V >= (Min + 1). |
| 7247 | // This is true even if (Min + 1) wraps around -- in case of |
| 7248 | // wraparound, (Min + 1) < Min, so (V >= Min => V >= (Min + 1)). |
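| | // E.g. if the unsigned minimum of V is 0 and the guard tells us V != 0, |
| | // then V u>= 1, and we may use that sharper bound below. |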
| 7249 | |
| 7250 | APInt SharperMin = Min + 1; |
| 7251 | |
| 7252 | switch (Pred) { |
| 7253 | case ICmpInst::ICMP_SGE: |
| 7254 | case ICmpInst::ICMP_UGE: |
| 7255 | // We know V `Pred` SharperMin. If this implies LHS `Pred` |
| 7256 | // RHS, we're done. |
| 7257 | if (isImpliedCondOperands(Pred, LHS, RHS, V, |
| 7258 | getConstant(SharperMin))) |
| 7259 | return true; |
| 7260 | |
| 7261 | case ICmpInst::ICMP_SGT: |
| 7262 | case ICmpInst::ICMP_UGT: |
| 7263 | // We know from the range information that (V `Pred` Min || |
| 7264 | // V == Min). We know from the guarding condition that !(V |
| 7265 | // == Min). This gives us |
| 7266 | // |
| 7267 | // V `Pred` Min || V == Min && !(V == Min) |
| 7268 | // => V `Pred` Min |
| 7269 | // |
| 7270 | // If V `Pred` Min implies LHS `Pred` RHS, we're done. |
| 7271 | |
| 7272 | if (isImpliedCondOperands(Pred, LHS, RHS, V, getConstant(Min))) |
| 7273 | return true; |
| 7274 | |
| 7275 | default: |
| 7276 | // No change |
| 7277 | break; |
| 7278 | } |
| 7279 | } |
| 7280 | } |
| 7281 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7282 | // Check whether the actual condition is beyond sufficient. |
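| | // E.g. a dominating check 'X == Y' is enough to establish 'X u<= Y' over the |
| | // same operands, and a strict dominating check such as 'X u< Y' is enough to |
| | // establish 'X != Y'. |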
| 7283 | if (FoundPred == ICmpInst::ICMP_EQ) |
| 7284 | if (ICmpInst::isTrueWhenEqual(Pred)) |
| 7285 | if (isImpliedCondOperands(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 7286 | return true; |
| 7287 | if (Pred == ICmpInst::ICMP_NE) |
| 7288 | if (!ICmpInst::isTrueWhenEqual(FoundPred)) |
| 7289 | if (isImpliedCondOperands(FoundPred, LHS, RHS, FoundLHS, FoundRHS)) |
| 7290 | return true; |
| 7291 | |
| 7292 | // Otherwise assume the worst. |
| 7293 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7294 | } |
| 7295 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7296 | /// isImpliedCondOperands - Test whether the condition described by Pred, |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 7297 | /// LHS, and RHS is true whenever the condition described by Pred, FoundLHS, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7298 | /// and FoundRHS is true. |
| 7299 | bool ScalarEvolution::isImpliedCondOperands(ICmpInst::Predicate Pred, |
| 7300 | const SCEV *LHS, const SCEV *RHS, |
| 7301 | const SCEV *FoundLHS, |
| 7302 | const SCEV *FoundRHS) { |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 7303 | if (isImpliedCondOperandsViaRanges(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 7304 | return true; |
| 7305 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7306 | return isImpliedCondOperandsHelper(Pred, LHS, RHS, |
| 7307 | FoundLHS, FoundRHS) || |
| 7308 | // ~x < ~y --> x > y |
| 7309 | isImpliedCondOperandsHelper(Pred, LHS, RHS, |
| 7310 | getNotSCEV(FoundRHS), |
| 7311 | getNotSCEV(FoundLHS)); |
| 7312 | } |
| 7313 | |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7314 | |
| 7315 | /// If Expr computes ~A, return A; otherwise return nullptr. |
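| | /// SCEV has no dedicated not-expression; ~A is canonicalized as the add |
| | /// (-1) + (-1) * A, which is the shape matched below. |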
| 7316 | static const SCEV *MatchNotExpr(const SCEV *Expr) { |
| 7317 | const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Expr); |
| 7318 | if (!Add || Add->getNumOperands() != 2) return nullptr; |
| 7319 | |
| 7320 | const SCEVConstant *AddLHS = dyn_cast<SCEVConstant>(Add->getOperand(0)); |
| 7321 | if (!(AddLHS && AddLHS->getValue()->getValue().isAllOnesValue())) |
| 7322 | return nullptr; |
| 7323 | |
| 7324 | const SCEVMulExpr *AddRHS = dyn_cast<SCEVMulExpr>(Add->getOperand(1)); |
| 7325 | if (!AddRHS || AddRHS->getNumOperands() != 2) return nullptr; |
| 7326 | |
| 7327 | const SCEVConstant *MulLHS = dyn_cast<SCEVConstant>(AddRHS->getOperand(0)); |
| 7328 | if (!(MulLHS && MulLHS->getValue()->getValue().isAllOnesValue())) |
| 7329 | return nullptr; |
| 7330 | |
| 7331 | return AddRHS->getOperand(1); |
| 7332 | } |
| 7333 | |
| 7334 | |
| 7335 | /// Is MaybeMaxExpr an SMax or UMax of Candidate and some other values? |
| 7336 | template<typename MaxExprType> |
| 7337 | static bool IsMaxConsistingOf(const SCEV *MaybeMaxExpr, |
| 7338 | const SCEV *Candidate) { |
| 7339 | const MaxExprType *MaxExpr = dyn_cast<MaxExprType>(MaybeMaxExpr); |
| 7340 | if (!MaxExpr) return false; |
| 7341 | |
| 7342 | auto It = std::find(MaxExpr->op_begin(), MaxExpr->op_end(), Candidate); |
| 7343 | return It != MaxExpr->op_end(); |
| 7344 | } |
| 7345 | |
| 7346 | |
| 7347 | /// Is MaybeMinExpr an SMin or UMin of Candidate and some other values? |
| 7348 | template<typename MaxExprType> |
| 7349 | static bool IsMinConsistingOf(ScalarEvolution &SE, |
| 7350 | const SCEV *MaybeMinExpr, |
| 7351 | const SCEV *Candidate) { |
| 7352 | const SCEV *MaybeMaxExpr = MatchNotExpr(MaybeMinExpr); |
| 7353 | if (!MaybeMaxExpr) |
| 7354 | return false; |
| 7355 | |
| 7356 | return IsMaxConsistingOf<MaxExprType>(MaybeMaxExpr, SE.getNotSCEV(Candidate)); |
| 7357 | } |
| 7358 | |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 7359 | static bool IsKnownPredicateViaAddRecStart(ScalarEvolution &SE, |
| 7360 | ICmpInst::Predicate Pred, |
| 7361 | const SCEV *LHS, const SCEV *RHS) { |
| 7362 | |
| 7363 | // If both sides are affine addrecs for the same loop, with equal |
| 7364 | // steps, and we know the recurrences don't wrap, then we only |
| 7365 | // need to check the predicate on the starting values. |
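| | // For example, {A,+,S}<nsw> s< {B,+,S}<nsw> holds in every iteration of the |
| | // shared loop whenever A s< B, since both sides advance in lockstep without |
| | // wrapping. |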
| 7366 | |
| 7367 | if (!ICmpInst::isRelational(Pred)) |
| 7368 | return false; |
| 7369 | |
| 7370 | const SCEVAddRecExpr *LAR = dyn_cast<SCEVAddRecExpr>(LHS); |
| 7371 | if (!LAR) |
| 7372 | return false; |
| 7373 | const SCEVAddRecExpr *RAR = dyn_cast<SCEVAddRecExpr>(RHS); |
| 7374 | if (!RAR) |
| 7375 | return false; |
| 7376 | if (LAR->getLoop() != RAR->getLoop()) |
| 7377 | return false; |
| 7378 | if (!LAR->isAffine() || !RAR->isAffine()) |
| 7379 | return false; |
| 7380 | |
| 7381 | if (LAR->getStepRecurrence(SE) != RAR->getStepRecurrence(SE)) |
| 7382 | return false; |
| 7383 | |
Hal Finkel | ff08a2e | 2015-08-19 17:26:07 +0000 | [diff] [blame] | 7384 | SCEV::NoWrapFlags NW = ICmpInst::isSigned(Pred) ? |
| 7385 | SCEV::FlagNSW : SCEV::FlagNUW; |
| 7386 | if (!LAR->getNoWrapFlags(NW) || !RAR->getNoWrapFlags(NW)) |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 7387 | return false; |
| 7388 | |
| 7389 | return SE.isKnownPredicate(Pred, LAR->getStart(), RAR->getStart()); |
| 7390 | } |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7391 | |
| 7392 | /// Is LHS `Pred` RHS true by virtue of LHS or RHS being a Min or Max |
| 7393 | /// expression? |
| 7394 | static bool IsKnownPredicateViaMinOrMax(ScalarEvolution &SE, |
| 7395 | ICmpInst::Predicate Pred, |
| 7396 | const SCEV *LHS, const SCEV *RHS) { |
| 7397 | switch (Pred) { |
| 7398 | default: |
| 7399 | return false; |
| 7400 | |
| 7401 | case ICmpInst::ICMP_SGE: |
| 7402 | std::swap(LHS, RHS); |
| 7403 | // fall through |
| 7404 | case ICmpInst::ICMP_SLE: |
| 7405 | return |
| 7406 | // min(A, ...) <= A |
| 7407 | IsMinConsistingOf<SCEVSMaxExpr>(SE, LHS, RHS) || |
| 7408 | // A <= max(A, ...) |
| 7409 | IsMaxConsistingOf<SCEVSMaxExpr>(RHS, LHS); |
| 7410 | |
| 7411 | case ICmpInst::ICMP_UGE: |
| 7412 | std::swap(LHS, RHS); |
| 7413 | // fall through |
| 7414 | case ICmpInst::ICMP_ULE: |
| 7415 | return |
| 7416 | // min(A, ...) <= A |
| 7417 | IsMinConsistingOf<SCEVUMaxExpr>(SE, LHS, RHS) || |
| 7418 | // A <= max(A, ...) |
| 7419 | IsMaxConsistingOf<SCEVUMaxExpr>(RHS, LHS); |
| 7420 | } |
| 7421 | |
| 7422 | llvm_unreachable("covered switch fell through?!"); |
| 7423 | } |
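// Example with hypothetical values: "%a <=s (smax %a, %b)" is proven by the SLE
// case through IsMaxConsistingOf, since %a appears among the smax operands; the
// SGE case is reduced to it by swapping LHS and RHS first.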
| 7424 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7425 | /// isImpliedCondOperandsHelper - Test whether the condition described by |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 7426 | /// Pred, LHS, and RHS is true whenever the condition described by Pred, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7427 | /// FoundLHS, and FoundRHS is true. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7428 | bool |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 7429 | ScalarEvolution::isImpliedCondOperandsHelper(ICmpInst::Predicate Pred, |
| 7430 | const SCEV *LHS, const SCEV *RHS, |
| 7431 | const SCEV *FoundLHS, |
| 7432 | const SCEV *FoundRHS) { |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7433 | auto IsKnownPredicateFull = |
| 7434 | [this](ICmpInst::Predicate Pred, const SCEV *LHS, const SCEV *RHS) { |
| 7435 | return isKnownPredicateWithRanges(Pred, LHS, RHS) || |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 7436 | IsKnownPredicateViaMinOrMax(*this, Pred, LHS, RHS) || |
| 7437 | IsKnownPredicateViaAddRecStart(*this, Pred, LHS, RHS); |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7438 | }; |
| 7439 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7440 | switch (Pred) { |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 7441 | default: llvm_unreachable("Unexpected ICmpInst::Predicate value!"); |
| 7442 | case ICmpInst::ICMP_EQ: |
| 7443 | case ICmpInst::ICMP_NE: |
| 7444 | if (HasSameValue(LHS, FoundLHS) && HasSameValue(RHS, FoundRHS)) |
| 7445 | return true; |
| 7446 | break; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7447 | case ICmpInst::ICMP_SLT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 7448 | case ICmpInst::ICMP_SLE: |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7449 | if (IsKnownPredicateFull(ICmpInst::ICMP_SLE, LHS, FoundLHS) && |
| 7450 | IsKnownPredicateFull(ICmpInst::ICMP_SGE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7451 | return true; |
| 7452 | break; |
| 7453 | case ICmpInst::ICMP_SGT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 7454 | case ICmpInst::ICMP_SGE: |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7455 | if (IsKnownPredicateFull(ICmpInst::ICMP_SGE, LHS, FoundLHS) && |
| 7456 | IsKnownPredicateFull(ICmpInst::ICMP_SLE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7457 | return true; |
| 7458 | break; |
| 7459 | case ICmpInst::ICMP_ULT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 7460 | case ICmpInst::ICMP_ULE: |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7461 | if (IsKnownPredicateFull(ICmpInst::ICMP_ULE, LHS, FoundLHS) && |
| 7462 | IsKnownPredicateFull(ICmpInst::ICMP_UGE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7463 | return true; |
| 7464 | break; |
| 7465 | case ICmpInst::ICMP_UGT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 7466 | case ICmpInst::ICMP_UGE: |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 7467 | if (IsKnownPredicateFull(ICmpInst::ICMP_UGE, LHS, FoundLHS) && |
| 7468 | IsKnownPredicateFull(ICmpInst::ICMP_ULE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 7469 | return true; |
| 7470 | break; |
| 7471 | } |
| 7472 | |
| 7473 | return false; |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 7474 | } |
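// A worked (hypothetical) instance of the SLT/SLE case: knowing "%i <s 100"
// (FoundLHS = %i, FoundRHS = 100), the query "%i <s 200" (LHS = %i, RHS = 200)
// succeeds because %i <=s %i and 200 >=s 100 both hold: tightening the left
// side and loosening the right side preserves the known strict inequality.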
| 7475 | |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 7476 | /// isImpliedCondOperandsViaRanges - helper function for isImpliedCondOperands. |
| 7477 | /// Tries to get cases like "X `sgt` 0 => X - 1 `sgt` -1". |
| 7478 | bool ScalarEvolution::isImpliedCondOperandsViaRanges(ICmpInst::Predicate Pred, |
| 7479 | const SCEV *LHS, |
| 7480 | const SCEV *RHS, |
| 7481 | const SCEV *FoundLHS, |
| 7482 | const SCEV *FoundRHS) { |
| 7483 | if (!isa<SCEVConstant>(RHS) || !isa<SCEVConstant>(FoundRHS)) |
| 7484 | // The restriction on `FoundRHS` can be lifted easily -- it exists only to |
| 7485 | // reduce the compile time impact of this optimization. |
| 7486 | return false; |
| 7487 | |
| 7488 | const SCEVAddExpr *AddLHS = dyn_cast<SCEVAddExpr>(LHS); |
| 7489 | if (!AddLHS || AddLHS->getOperand(1) != FoundLHS || |
| 7490 | !isa<SCEVConstant>(AddLHS->getOperand(0))) |
| 7491 | return false; |
| 7492 | |
| 7493 | APInt ConstFoundRHS = cast<SCEVConstant>(FoundRHS)->getValue()->getValue(); |
| 7494 | |
| 7495 | // `FoundLHSRange` is the range we know `FoundLHS` to be in by virtue of the |
| 7496 | // antecedent "`FoundLHS` `Pred` `FoundRHS`". |
| 7497 | ConstantRange FoundLHSRange = |
| 7498 | ConstantRange::makeAllowedICmpRegion(Pred, ConstFoundRHS); |
| 7499 | |
| 7500 | // Since `LHS` is `FoundLHS` + `AddLHS->getOperand(0)`, we can compute a range |
| 7501 | // for `LHS`: |
| 7502 | APInt Addend = |
| 7503 | cast<SCEVConstant>(AddLHS->getOperand(0))->getValue()->getValue(); |
| 7504 | ConstantRange LHSRange = FoundLHSRange.add(ConstantRange(Addend)); |
| 7505 | |
| 7506 | // We can also compute the range of values for `LHS` that satisfy the |
| 7507 | // consequent, "`LHS` `Pred` `RHS`": |
| 7508 | APInt ConstRHS = cast<SCEVConstant>(RHS)->getValue()->getValue(); |
| 7509 | ConstantRange SatisfyingLHSRange = |
| 7510 | ConstantRange::makeSatisfyingICmpRegion(Pred, ConstRHS); |
| 7511 | |
| 7512 | // The antecedent implies the consequent if every value of `LHS` that |
| 7513 | // satisfies the antecedent also satisfies the consequent. |
| 7514 | return SatisfyingLHSRange.contains(LHSRange); |
| 7515 | } |
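// Sketch of the ranges for the example above, assuming 32-bit values: from
// "X >s 0" we get FoundLHSRange = [1, INT32_MAX]; adding the -1 from
// LHS = (-1 + X) gives LHSRange = [0, INT32_MAX - 1]; the values satisfying
// "LHS >s -1" form [0, INT32_MAX], which contains LHSRange, so the implication
// holds.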
| 7516 | |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7517 | // Verify if a linear IV with a positive stride can overflow when used in a |
| 7518 | // less-than comparison, knowing the invariant term of the comparison, the |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7519 | // stride, and the NSW/NUW flags on the recurrence. |
| 7520 | bool ScalarEvolution::doesIVOverflowOnLT(const SCEV *RHS, const SCEV *Stride, |
| 7521 | bool IsSigned, bool NoWrap) { |
| 7522 | if (NoWrap) return false; |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 7523 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7524 | unsigned BitWidth = getTypeSizeInBits(RHS->getType()); |
| 7525 | const SCEV *One = getConstant(Stride->getType(), 1); |
Andrew Trick | 2afa325 | 2011-03-09 17:29:58 +0000 | [diff] [blame] | 7526 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7527 | if (IsSigned) { |
| 7528 | APInt MaxRHS = getSignedRange(RHS).getSignedMax(); |
| 7529 | APInt MaxValue = APInt::getSignedMaxValue(BitWidth); |
| 7530 | APInt MaxStrideMinusOne = getSignedRange(getMinusSCEV(Stride, One)) |
| 7531 | .getSignedMax(); |
Andrew Trick | 2afa325 | 2011-03-09 17:29:58 +0000 | [diff] [blame] | 7532 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7533 | // SMaxRHS + SMaxStrideMinusOne > SMaxValue => overflow! |
| 7534 | return (MaxValue - MaxStrideMinusOne).slt(MaxRHS); |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 7535 | } |
Dan Gohman | 0104842 | 2009-06-21 23:46:38 +0000 | [diff] [blame] | 7536 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7537 | APInt MaxRHS = getUnsignedRange(RHS).getUnsignedMax(); |
| 7538 | APInt MaxValue = APInt::getMaxValue(BitWidth); |
| 7539 | APInt MaxStrideMinusOne = getUnsignedRange(getMinusSCEV(Stride, One)) |
| 7540 | .getUnsignedMax(); |
| 7541 | |
| 7542 | // UMaxRHS + UMaxStrideMinusOne > UMaxValue => overflow! |
| 7543 | return (MaxValue - MaxStrideMinusOne).ult(MaxRHS); |
| 7544 | } |
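// Numeric illustration with hypothetical 8-bit values: if the signed max of RHS
// is 120 and the stride is the constant 20, then MaxValue - MaxStrideMinusOne
// is 127 - 19 = 108 and 108 <s 120, so the IV may step past RHS and wrap; the
// function reports a possible overflow in that case.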
| 7545 | |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7546 | // Verify if a linear IV with a negative stride can overflow when used in a |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7547 | // greater-than comparison, knowing the invariant term of the comparison, |
| 7548 | // the stride, and the NSW/NUW flags on the recurrence. |
| 7549 | bool ScalarEvolution::doesIVOverflowOnGT(const SCEV *RHS, const SCEV *Stride, |
| 7550 | bool IsSigned, bool NoWrap) { |
| 7551 | if (NoWrap) return false; |
| 7552 | |
| 7553 | unsigned BitWidth = getTypeSizeInBits(RHS->getType()); |
| 7554 | const SCEV *One = getConstant(Stride->getType(), 1); |
| 7555 | |
| 7556 | if (IsSigned) { |
| 7557 | APInt MinRHS = getSignedRange(RHS).getSignedMin(); |
| 7558 | APInt MinValue = APInt::getSignedMinValue(BitWidth); |
| 7559 | APInt MaxStrideMinusOne = getSignedRange(getMinusSCEV(Stride, One)) |
| 7560 | .getSignedMax(); |
| 7561 | |
| 7562 | // SMinRHS - SMaxStrideMinusOne < SMinValue => overflow! |
| 7563 | return (MinValue + MaxStrideMinusOne).sgt(MinRHS); |
| 7564 | } |
| 7565 | |
| 7566 | APInt MinRHS = getUnsignedRange(RHS).getUnsignedMin(); |
| 7567 | APInt MinValue = APInt::getMinValue(BitWidth); |
| 7568 | APInt MaxStrideMinusOne = getUnsignedRange(getMinusSCEV(Stride, One)) |
| 7569 | .getUnsignedMax(); |
| 7570 | |
| 7571 | // UMinRHS - UMaxStrideMinusOne < UMinValue => overflow! |
| 7572 | return (MinValue + MaxStrideMinusOne).ugt(MinRHS); |
| 7573 | } |
| 7574 | |
| 7575 | // Compute the backedge taken count knowing the interval difference, the |
| 7576 | // stride, and the presence of equality in the comparison. |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7577 | const SCEV *ScalarEvolution::computeBECount(const SCEV *Delta, const SCEV *Step, |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7578 | bool Equality) { |
| 7579 | const SCEV *One = getConstant(Step->getType(), 1); |
| 7580 | Delta = Equality ? getAddExpr(Delta, Step) |
| 7581 | : getAddExpr(Delta, getMinusSCEV(Step, One)); |
| 7582 | return getUDivExpr(Delta, Step); |
Dan Gohman | 0104842 | 2009-06-21 23:46:38 +0000 | [diff] [blame] | 7583 | } |
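// For instance, with made-up constants Delta = 9 and Step = 3: without equality
// the count is (9 + 2) /u 3 = 3 (the IV values 0, 3, 6 satisfy iv < 9), and
// with equality it is (9 + 3) /u 3 = 4 (0, 3, 6, 9 satisfy iv <= 9).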
| 7584 | |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7585 | /// HowManyLessThans - Return the number of times a backedge containing the |
| 7586 | /// specified less-than comparison will execute. If not computable, return |
Dan Gohman | 4c720c0 | 2009-06-06 14:37:11 +0000 | [diff] [blame] | 7587 | /// CouldNotCompute. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 7588 | /// |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7589 | /// @param ControlsExit is true when the LHS < RHS condition directly controls |
| 7590 | /// the branch (the loop exits only if the condition is true). In this case, we can use |
| 7591 | /// NoWrapFlags to skip overflow checks. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7592 | ScalarEvolution::ExitLimit |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7593 | ScalarEvolution::HowManyLessThans(const SCEV *LHS, const SCEV *RHS, |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7594 | const Loop *L, bool IsSigned, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7595 | bool ControlsExit) { |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7596 | // We handle only IV < Invariant |
| 7597 | if (!isLoopInvariant(RHS, L)) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7598 | return getCouldNotCompute(); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7599 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7600 | const SCEVAddRecExpr *IV = dyn_cast<SCEVAddRecExpr>(LHS); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 7601 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7602 | // Avoid weird loops |
| 7603 | if (!IV || IV->getLoop() != L || !IV->isAffine()) |
| 7604 | return getCouldNotCompute(); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7605 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7606 | bool NoWrap = ControlsExit && |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7607 | IV->getNoWrapFlags(IsSigned ? SCEV::FlagNSW : SCEV::FlagNUW); |
Wojciech Matyjewicz | 35545fd | 2008-02-13 11:51:34 +0000 | [diff] [blame] | 7608 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7609 | const SCEV *Stride = IV->getStepRecurrence(*this); |
Wojciech Matyjewicz | 35545fd | 2008-02-13 11:51:34 +0000 | [diff] [blame] | 7610 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7611 | // Avoid negative or zero stride values |
| 7612 | if (!isKnownPositive(Stride)) |
| 7613 | return getCouldNotCompute(); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 7614 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7615 | // Avoid proven overflow cases: this will ensure that the backedge taken count |
| 7616 | // will not generate any unsigned overflow. Relaxed no-overflow conditions |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7617 | // exploit NoWrapFlags, allowing us to optimize in the presence of undefined |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7618 | // behavior, as in the C language. |
| 7619 | if (!Stride->isOne() && doesIVOverflowOnLT(RHS, Stride, IsSigned, NoWrap)) |
| 7620 | return getCouldNotCompute(); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 7621 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7622 | ICmpInst::Predicate Cond = IsSigned ? ICmpInst::ICMP_SLT |
| 7623 | : ICmpInst::ICMP_ULT; |
| 7624 | const SCEV *Start = IV->getStart(); |
| 7625 | const SCEV *End = RHS; |
Bradley Smith | 9992b16 | 2014-10-31 11:40:32 +0000 | [diff] [blame] | 7626 | if (!isLoopEntryGuardedByCond(L, Cond, getMinusSCEV(Start, Stride), RHS)) { |
| 7627 | const SCEV *Diff = getMinusSCEV(RHS, Start); |
| 7628 | // If we have NoWrap set, then we can assume that the increment won't |
| 7629 | // overflow, in which case if RHS - Start is a constant, we don't need to |
| 7630 | // do a max operation since we can just figure it out statically |
| 7631 | if (NoWrap && isa<SCEVConstant>(Diff)) { |
| 7632 | APInt D = cast<SCEVConstant>(Diff)->getValue()->getValue(); |
| 7633 | if (D.isNegative()) |
| 7634 | End = Start; |
| 7635 | } else |
| 7636 | End = IsSigned ? getSMaxExpr(RHS, Start) |
| 7637 | : getUMaxExpr(RHS, Start); |
| 7638 | } |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 7639 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7640 | const SCEV *BECount = computeBECount(getMinusSCEV(End, Start), Stride, false); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 7641 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7642 | APInt MinStart = IsSigned ? getSignedRange(Start).getSignedMin() |
| 7643 | : getUnsignedRange(Start).getUnsignedMin(); |
Andrew Trick | 2afa325 | 2011-03-09 17:29:58 +0000 | [diff] [blame] | 7644 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7645 | APInt MinStride = IsSigned ? getSignedRange(Stride).getSignedMin() |
| 7646 | : getUnsignedRange(Stride).getUnsignedMin(); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 7647 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7648 | unsigned BitWidth = getTypeSizeInBits(LHS->getType()); |
| 7649 | APInt Limit = IsSigned ? APInt::getSignedMaxValue(BitWidth) - (MinStride - 1) |
| 7650 | : APInt::getMaxValue(BitWidth) - (MinStride - 1); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7651 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7652 | // Although End can be a MAX expression we estimate MaxEnd considering only |
| 7653 | // the case End = RHS. This is safe because in the other case (End - Start) |
| 7654 | // is zero, leading to a zero maximum backedge taken count. |
| 7655 | APInt MaxEnd = |
| 7656 | IsSigned ? APIntOps::smin(getSignedRange(RHS).getSignedMax(), Limit) |
| 7657 | : APIntOps::umin(getUnsignedRange(RHS).getUnsignedMax(), Limit); |
| 7658 | |
Arnaud A. de Grandmaison | 75c9e6d | 2014-03-15 22:13:15 +0000 | [diff] [blame] | 7659 | const SCEV *MaxBECount; |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7660 | if (isa<SCEVConstant>(BECount)) |
| 7661 | MaxBECount = BECount; |
| 7662 | else |
| 7663 | MaxBECount = computeBECount(getConstant(MaxEnd - MinStart), |
| 7664 | getConstant(MinStride), false); |
| 7665 | |
| 7666 | if (isa<SCEVCouldNotCompute>(MaxBECount)) |
| 7667 | MaxBECount = BECount; |
| 7668 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7669 | return ExitLimit(BECount, MaxBECount); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7670 | } |
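// Rough end-to-end sketch for a hypothetical loop: for an IV {0,+,4} compared
// as "iv <u %n" with no useful entry guard, End becomes umax(%n, 0), which
// folds to %n, and, assuming the overflow check passes, the backedge-taken
// count comes out as (%n + 3) /u 4; the max count is derived the same way from
// the unsigned range of %n.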
| 7671 | |
| 7672 | ScalarEvolution::ExitLimit |
| 7673 | ScalarEvolution::HowManyGreaterThans(const SCEV *LHS, const SCEV *RHS, |
| 7674 | const Loop *L, bool IsSigned, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7675 | bool ControlsExit) { |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7676 | // We handle only IV > Invariant |
| 7677 | if (!isLoopInvariant(RHS, L)) |
| 7678 | return getCouldNotCompute(); |
| 7679 | |
| 7680 | const SCEVAddRecExpr *IV = dyn_cast<SCEVAddRecExpr>(LHS); |
| 7681 | |
| 7682 | // Avoid weird loops |
| 7683 | if (!IV || IV->getLoop() != L || !IV->isAffine()) |
| 7684 | return getCouldNotCompute(); |
| 7685 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7686 | bool NoWrap = ControlsExit && |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7687 | IV->getNoWrapFlags(IsSigned ? SCEV::FlagNSW : SCEV::FlagNUW); |
| 7688 | |
| 7689 | const SCEV *Stride = getNegativeSCEV(IV->getStepRecurrence(*this)); |
| 7690 | |
| 7691 | // Avoid negative or zero stride values |
| 7692 | if (!isKnownPositive(Stride)) |
| 7693 | return getCouldNotCompute(); |
| 7694 | |
| 7695 | // Avoid proven overflow cases: this will ensure that the backedge taken count |
| 7696 | // will not generate any unsigned overflow. Relaxed no-overflow conditions |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7697 | // exploit NoWrapFlags, allowing us to optimize in the presence of undefined |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7698 | // behavior, as in the C language. |
| 7699 | if (!Stride->isOne() && doesIVOverflowOnGT(RHS, Stride, IsSigned, NoWrap)) |
| 7700 | return getCouldNotCompute(); |
| 7701 | |
| 7702 | ICmpInst::Predicate Cond = IsSigned ? ICmpInst::ICMP_SGT |
| 7703 | : ICmpInst::ICMP_UGT; |
| 7704 | |
| 7705 | const SCEV *Start = IV->getStart(); |
| 7706 | const SCEV *End = RHS; |
Bradley Smith | 9992b16 | 2014-10-31 11:40:32 +0000 | [diff] [blame] | 7707 | if (!isLoopEntryGuardedByCond(L, Cond, getAddExpr(Start, Stride), RHS)) { |
| 7708 | const SCEV *Diff = getMinusSCEV(RHS, Start); |
| 7709 | // If we have NoWrap set, then we can assume that the increment won't |
| 7710 | // overflow, in which case if RHS - Start is a constant, we don't need to |
| 7711 | // do a max operation since we can just figure it out statically |
| 7712 | if (NoWrap && isa<SCEVConstant>(Diff)) { |
| 7713 | APInt D = cast<SCEVConstant>(Diff)->getValue()->getValue(); |
| 7714 | if (!D.isNegative()) |
| 7715 | End = Start; |
| 7716 | } else |
| 7717 | End = IsSigned ? getSMinExpr(RHS, Start) |
| 7718 | : getUMinExpr(RHS, Start); |
| 7719 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7720 | |
| 7721 | const SCEV *BECount = computeBECount(getMinusSCEV(Start, End), Stride, false); |
| 7722 | |
| 7723 | APInt MaxStart = IsSigned ? getSignedRange(Start).getSignedMax() |
| 7724 | : getUnsignedRange(Start).getUnsignedMax(); |
| 7725 | |
| 7726 | APInt MinStride = IsSigned ? getSignedRange(Stride).getSignedMin() |
| 7727 | : getUnsignedRange(Stride).getUnsignedMin(); |
| 7728 | |
| 7729 | unsigned BitWidth = getTypeSizeInBits(LHS->getType()); |
| 7730 | APInt Limit = IsSigned ? APInt::getSignedMinValue(BitWidth) + (MinStride - 1) |
| 7731 | : APInt::getMinValue(BitWidth) + (MinStride - 1); |
| 7732 | |
| 7733 | // Although End can be a MIN expression we estimate MinEnd considering only |
| 7734 | // the case End = RHS. This is safe because in the other case (Start - End) |
| 7735 | // is zero, leading to a zero maximum backedge taken count. |
| 7736 | APInt MinEnd = |
| 7737 | IsSigned ? APIntOps::smax(getSignedRange(RHS).getSignedMin(), Limit) |
| 7738 | : APIntOps::umax(getUnsignedRange(RHS).getUnsignedMin(), Limit); |
| 7739 | |
| 7740 | |
| 7741 | const SCEV *MaxBECount = getCouldNotCompute(); |
| 7742 | if (isa<SCEVConstant>(BECount)) |
| 7743 | MaxBECount = BECount; |
| 7744 | else |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 7745 | MaxBECount = computeBECount(getConstant(MaxStart - MinEnd), |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7746 | getConstant(MinStride), false); |
| 7747 | |
| 7748 | if (isa<SCEVCouldNotCompute>(MaxBECount)) |
| 7749 | MaxBECount = BECount; |
| 7750 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7751 | return ExitLimit(BECount, MaxBECount); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7752 | } |
| 7753 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7754 | /// getNumIterationsInRange - Return the number of iterations of this loop that |
| 7755 | /// produce values in the specified constant range. Another way of looking at |
| 7756 | /// this is that it returns the first iteration number where the value is not in |
| 7757 | /// the range, thus computing the exit count. If the iteration count can't |
| 7758 | /// be computed, an instance of SCEVCouldNotCompute is returned. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7759 | const SCEV *SCEVAddRecExpr::getNumIterationsInRange(ConstantRange Range, |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7760 | ScalarEvolution &SE) const { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7761 | if (Range.isFullSet()) // Infinite loop. |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7762 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7763 | |
| 7764 | // If the start is a non-zero constant, shift the range to simplify things. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7765 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(getStart())) |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 7766 | if (!SC->getValue()->isZero()) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7767 | SmallVector<const SCEV *, 4> Operands(op_begin(), op_end()); |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 7768 | Operands[0] = SE.getConstant(SC->getType(), 0); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 7769 | const SCEV *Shifted = SE.getAddRecExpr(Operands, getLoop(), |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 7770 | getNoWrapFlags(FlagNW)); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7771 | if (const SCEVAddRecExpr *ShiftedAddRec = |
| 7772 | dyn_cast<SCEVAddRecExpr>(Shifted)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7773 | return ShiftedAddRec->getNumIterationsInRange( |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7774 | Range.subtract(SC->getValue()->getValue()), SE); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7775 | // This is strange and shouldn't happen. |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7776 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7777 | } |
| 7778 | |
| 7779 | // The only time we can solve this is when we have all constant indices. |
| 7780 | // Otherwise, we cannot determine the overflow conditions. |
| 7781 | for (unsigned i = 0, e = getNumOperands(); i != e; ++i) |
| 7782 | if (!isa<SCEVConstant>(getOperand(i))) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7783 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7784 | |
| 7785 | |
| 7786 | // Okay, at this point we know that all elements of the chrec are constants and |
| 7787 | // that the start element is zero. |
| 7788 | |
| 7789 | // First check to see if the range contains zero. If not, the first |
| 7790 | // iteration exits. |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 7791 | unsigned BitWidth = SE.getTypeSizeInBits(getType()); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 7792 | if (!Range.contains(APInt(BitWidth, 0))) |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 7793 | return SE.getConstant(getType(), 0); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7794 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7795 | if (isAffine()) { |
| 7796 | // If this is an affine expression then we have this situation: |
| 7797 | // Solve {0,+,A} in Range === Ax in Range |
| 7798 | |
Nick Lewycky | 5246026 | 2007-07-16 02:08:00 +0000 | [diff] [blame] | 7799 | // We know that zero is in the range. If A is positive then we know that |
| 7800 | // the upper value of the range must be the first possible exit value. |
| 7801 | // If A is negative then the lower of the range is the last possible loop |
| 7802 | // value. Also note that we already checked for a full range. |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 7803 | APInt One(BitWidth,1); |
Nick Lewycky | 5246026 | 2007-07-16 02:08:00 +0000 | [diff] [blame] | 7804 | APInt A = cast<SCEVConstant>(getOperand(1))->getValue()->getValue(); |
| 7805 | APInt End = A.sge(One) ? (Range.getUpper() - One) : Range.getLower(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7806 | |
Nick Lewycky | 5246026 | 2007-07-16 02:08:00 +0000 | [diff] [blame] | 7807 | // The exit value should be (End+A)/A. |
Nick Lewycky | 3934961 | 2007-09-27 14:12:54 +0000 | [diff] [blame] | 7808 | APInt ExitVal = (End + A).udiv(A); |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 7809 | ConstantInt *ExitValue = ConstantInt::get(SE.getContext(), ExitVal); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7810 | |
| 7811 | // Evaluate at the exit value. If we really did fall out of the valid |
| 7812 | // range, then we computed our trip count, otherwise wrap around or other |
| 7813 | // things must have happened. |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7814 | ConstantInt *Val = EvaluateConstantChrecAtConstant(this, ExitValue, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 7815 | if (Range.contains(Val->getValue())) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7816 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7817 | |
| 7818 | // Ensure that the previous value is in the range. This is a sanity check. |
Reid Spencer | 3a7e9d8 | 2007-02-28 19:57:34 +0000 | [diff] [blame] | 7819 | assert(Range.contains( |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7820 | EvaluateConstantChrecAtConstant(this, |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 7821 | ConstantInt::get(SE.getContext(), ExitVal - One), SE)->getValue()) && |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7822 | "Linear scev computation is off in a bad way!"); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7823 | return SE.getConstant(ExitValue); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7824 | } else if (isQuadratic()) { |
| 7825 | // If this is a quadratic (3-term) AddRec {L,+,M,+,N}, find the roots of the |
| 7826 | // quadratic equation to solve it. To do this, we must frame our problem in |
| 7827 | // terms of figuring out when zero is crossed, instead of when |
| 7828 | // Range.getUpper() is crossed. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7829 | SmallVector<const SCEV *, 4> NewOps(op_begin(), op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7830 | NewOps[0] = SE.getNegativeSCEV(SE.getConstant(Range.getUpper())); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 7831 | const SCEV *NewAddRec = SE.getAddRecExpr(NewOps, getLoop(), |
| 7832 | // getNoWrapFlags(FlagNW) |
| 7833 | FlagAnyWrap); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7834 | |
| 7835 | // Next, solve the constructed addrec |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7836 | std::pair<const SCEV *,const SCEV *> Roots = |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7837 | SolveQuadraticEquation(cast<SCEVAddRecExpr>(NewAddRec), SE); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 7838 | const SCEVConstant *R1 = dyn_cast<SCEVConstant>(Roots.first); |
| 7839 | const SCEVConstant *R2 = dyn_cast<SCEVConstant>(Roots.second); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7840 | if (R1) { |
| 7841 | // Pick the smallest positive root value. |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 7842 | if (ConstantInt *CB = |
Owen Anderson | 487375e | 2009-07-29 18:55:55 +0000 | [diff] [blame] | 7843 | dyn_cast<ConstantInt>(ConstantExpr::getICmp(ICmpInst::ICMP_ULT, |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 7844 | R1->getValue(), R2->getValue()))) { |
David Blaikie | dc3f01e | 2015-03-09 01:57:13 +0000 | [diff] [blame] | 7845 | if (!CB->getZExtValue()) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7846 | std::swap(R1, R2); // R1 is the minimum root now. |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7847 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7848 | // Make sure the root is not off by one. The returned iteration should |
| 7849 | // not be in the range, but the previous one should be. When solving |
| 7850 | // for "X*X < 5", for example, we should not return a root of 2. |
| 7851 | ConstantInt *R1Val = EvaluateConstantChrecAtConstant(this, |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7852 | R1->getValue(), |
| 7853 | SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 7854 | if (Range.contains(R1Val->getValue())) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7855 | // The next iteration must be out of the range... |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 7856 | ConstantInt *NextVal = |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 7857 | ConstantInt::get(SE.getContext(), R1->getValue()->getValue()+1); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7858 | |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7859 | R1Val = EvaluateConstantChrecAtConstant(this, NextVal, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 7860 | if (!Range.contains(R1Val->getValue())) |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7861 | return SE.getConstant(NextVal); |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7862 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7863 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7864 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7865 | // If R1 was not in the range, then it is a good return value. Make |
| 7866 | // sure that R1-1 WAS in the range though, just in case. |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 7867 | ConstantInt *NextVal = |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 7868 | ConstantInt::get(SE.getContext(), R1->getValue()->getValue()-1); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7869 | R1Val = EvaluateConstantChrecAtConstant(this, NextVal, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 7870 | if (Range.contains(R1Val->getValue())) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7871 | return R1; |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7872 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7873 | } |
| 7874 | } |
| 7875 | } |
| 7876 | |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 7877 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7878 | } |
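// Worked (hypothetical) affine example: for {0,+,3} and Range = [0, 10), A = 3
// is positive, so End = 10 - 1 = 9 and ExitVal = (9 + 3) /u 3 = 4; iteration 4
// produces 12, the first value outside the range, while iteration 3 still
// produces 9 inside it, so the result is the constant 4.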
| 7879 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7880 | namespace { |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 7881 | struct FindUndefs { |
| 7882 | bool Found; |
| 7883 | FindUndefs() : Found(false) {} |
| 7884 | |
| 7885 | bool follow(const SCEV *S) { |
| 7886 | if (const SCEVUnknown *C = dyn_cast<SCEVUnknown>(S)) { |
| 7887 | if (isa<UndefValue>(C->getValue())) |
| 7888 | Found = true; |
| 7889 | } else if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) { |
| 7890 | if (isa<UndefValue>(C->getValue())) |
| 7891 | Found = true; |
| 7892 | } |
| 7893 | |
| 7894 | // Keep looking if we haven't found it yet. |
| 7895 | return !Found; |
| 7896 | } |
| 7897 | bool isDone() const { |
| 7898 | // Stop recursion if we have found an undef. |
| 7899 | return Found; |
| 7900 | } |
| 7901 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 7902 | } |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 7903 | |
| 7904 | // Return true when S contains at least one undef value. |
| 7905 | static inline bool |
| 7906 | containsUndefs(const SCEV *S) { |
| 7907 | FindUndefs F; |
| 7908 | SCEVTraversal<FindUndefs> ST(F); |
| 7909 | ST.visitAll(S); |
| 7910 | |
| 7911 | return F.Found; |
| 7912 | } |
| 7913 | |
| 7914 | namespace { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7915 | // Collect all steps of SCEV expressions. |
| 7916 | struct SCEVCollectStrides { |
| 7917 | ScalarEvolution &SE; |
| 7918 | SmallVectorImpl<const SCEV *> &Strides; |
| 7919 | |
| 7920 | SCEVCollectStrides(ScalarEvolution &SE, SmallVectorImpl<const SCEV *> &S) |
| 7921 | : SE(SE), Strides(S) {} |
| 7922 | |
| 7923 | bool follow(const SCEV *S) { |
| 7924 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) |
| 7925 | Strides.push_back(AR->getStepRecurrence(SE)); |
| 7926 | return true; |
| 7927 | } |
| 7928 | bool isDone() const { return false; } |
| 7929 | }; |
| 7930 | |
| 7931 | // Collect all SCEVUnknown and SCEVMulExpr expressions. |
| 7932 | struct SCEVCollectTerms { |
| 7933 | SmallVectorImpl<const SCEV *> &Terms; |
| 7934 | |
| 7935 | SCEVCollectTerms(SmallVectorImpl<const SCEV *> &T) |
| 7936 | : Terms(T) {} |
| 7937 | |
| 7938 | bool follow(const SCEV *S) { |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 7939 | if (isa<SCEVUnknown>(S) || isa<SCEVMulExpr>(S)) { |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 7940 | if (!containsUndefs(S)) |
| 7941 | Terms.push_back(S); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7942 | |
| 7943 | // Stop recursion: once we collected a term, do not walk its operands. |
| 7944 | return false; |
| 7945 | } |
| 7946 | |
| 7947 | // Keep looking. |
| 7948 | return true; |
| 7949 | } |
| 7950 | bool isDone() const { return false; } |
| 7951 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 7952 | } |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7953 | |
| 7954 | /// Find parametric terms in this SCEVAddRecExpr. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 7955 | void ScalarEvolution::collectParametricTerms(const SCEV *Expr, |
| 7956 | SmallVectorImpl<const SCEV *> &Terms) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7957 | SmallVector<const SCEV *, 4> Strides; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 7958 | SCEVCollectStrides StrideCollector(*this, Strides); |
| 7959 | visitAll(Expr, StrideCollector); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7960 | |
| 7961 | DEBUG({ |
| 7962 | dbgs() << "Strides:\n"; |
| 7963 | for (const SCEV *S : Strides) |
| 7964 | dbgs() << *S << "\n"; |
| 7965 | }); |
| 7966 | |
| 7967 | for (const SCEV *S : Strides) { |
| 7968 | SCEVCollectTerms TermCollector(Terms); |
| 7969 | visitAll(S, TermCollector); |
| 7970 | } |
| 7971 | |
| 7972 | DEBUG({ |
| 7973 | dbgs() << "Terms:\n"; |
| 7974 | for (const SCEV *T : Terms) |
| 7975 | dbgs() << *T << "\n"; |
| 7976 | }); |
| 7977 | } |
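// On the A[i][j][k] example described in the comment preceding delinearize()
// further below, the strides collected are (8 * %m * %o), (8 * %o) and 8; the
// terms kept are the two multiplications, while the constant stride 8
// contributes no term by itself.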
| 7978 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 7979 | static bool findArrayDimensionsRec(ScalarEvolution &SE, |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7980 | SmallVectorImpl<const SCEV *> &Terms, |
Sebastian Pop | 47fe7de | 2014-05-09 22:45:07 +0000 | [diff] [blame] | 7981 | SmallVectorImpl<const SCEV *> &Sizes) { |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 7982 | int Last = Terms.size() - 1; |
| 7983 | const SCEV *Step = Terms[Last]; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 7984 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7985 | // End of recursion. |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 7986 | if (Last == 0) { |
| 7987 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(Step)) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 7988 | SmallVector<const SCEV *, 2> Qs; |
| 7989 | for (const SCEV *Op : M->operands()) |
| 7990 | if (!isa<SCEVConstant>(Op)) |
| 7991 | Qs.push_back(Op); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 7992 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 7993 | Step = SE.getMulExpr(Qs); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 7994 | } |
| 7995 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 7996 | Sizes.push_back(Step); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 7997 | return true; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 7998 | } |
| 7999 | |
Benjamin Kramer | 8cff45a | 2014-05-10 17:47:18 +0000 | [diff] [blame] | 8000 | for (const SCEV *&Term : Terms) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8001 | // Normalize the terms before the next call to findArrayDimensionsRec. |
| 8002 | const SCEV *Q, *R; |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 8003 | SCEVDivision::divide(SE, Term, Step, &Q, &R); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8004 | |
| 8005 | // Bail out when GCD does not evenly divide one of the terms. |
| 8006 | if (!R->isZero()) |
| 8007 | return false; |
| 8008 | |
Benjamin Kramer | 8cff45a | 2014-05-10 17:47:18 +0000 | [diff] [blame] | 8009 | Term = Q; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8010 | } |
| 8011 | |
Tobias Grosser | 3080cf1 | 2014-05-08 07:55:34 +0000 | [diff] [blame] | 8012 | // Remove all SCEVConstants. |
Tobias Grosser | 1e9db7e | 2014-05-08 21:43:19 +0000 | [diff] [blame] | 8013 | Terms.erase(std::remove_if(Terms.begin(), Terms.end(), [](const SCEV *E) { |
| 8014 | return isa<SCEVConstant>(E); |
| 8015 | }), |
| 8016 | Terms.end()); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8017 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8018 | if (Terms.size() > 0) |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8019 | if (!findArrayDimensionsRec(SE, Terms, Sizes)) |
| 8020 | return false; |
| 8021 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 8022 | Sizes.push_back(Step); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8023 | return true; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8024 | } |
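// Hypothetical run on Terms = {(%m * %o), %o}: the last term %o is taken as the
// step, dividing both terms by it leaves {%m, 1}, the constant is dropped, the
// recursion on {%m} records %m, and %o is recorded on the way out, giving
// Sizes = [%m, %o].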
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8025 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8026 | namespace { |
| 8027 | struct FindParameter { |
| 8028 | bool FoundParameter; |
| 8029 | FindParameter() : FoundParameter(false) {} |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8030 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8031 | bool follow(const SCEV *S) { |
| 8032 | if (isa<SCEVUnknown>(S)) { |
| 8033 | FoundParameter = true; |
| 8034 | // Stop recursion: we found a parameter. |
| 8035 | return false; |
| 8036 | } |
| 8037 | // Keep looking. |
| 8038 | return true; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8039 | } |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8040 | bool isDone() const { |
| 8041 | // Stop recursion if we have found a parameter. |
| 8042 | return FoundParameter; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8043 | } |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8044 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 8045 | } |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8046 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8047 | // Returns true when S contains at least one SCEVUnknown parameter. |
| 8048 | static inline bool |
| 8049 | containsParameters(const SCEV *S) { |
| 8050 | FindParameter F; |
| 8051 | SCEVTraversal<FindParameter> ST(F); |
| 8052 | ST.visitAll(S); |
| 8053 | |
| 8054 | return F.FoundParameter; |
| 8055 | } |
| 8056 | |
| 8057 | // Returns true when one of the SCEVs of Terms contains a SCEVUnknown parameter. |
| 8058 | static inline bool |
| 8059 | containsParameters(SmallVectorImpl<const SCEV *> &Terms) { |
| 8060 | for (const SCEV *T : Terms) |
| 8061 | if (containsParameters(T)) |
| 8062 | return true; |
| 8063 | return false; |
| 8064 | } |
| 8065 | |
| 8066 | // Return the number of product terms in S. |
| 8067 | static inline int numberOfTerms(const SCEV *S) { |
| 8068 | if (const SCEVMulExpr *Expr = dyn_cast<SCEVMulExpr>(S)) |
| 8069 | return Expr->getNumOperands(); |
| 8070 | return 1; |
| 8071 | } |
| 8072 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8073 | static const SCEV *removeConstantFactors(ScalarEvolution &SE, const SCEV *T) { |
| 8074 | if (isa<SCEVConstant>(T)) |
| 8075 | return nullptr; |
| 8076 | |
| 8077 | if (isa<SCEVUnknown>(T)) |
| 8078 | return T; |
| 8079 | |
| 8080 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(T)) { |
| 8081 | SmallVector<const SCEV *, 2> Factors; |
| 8082 | for (const SCEV *Op : M->operands()) |
| 8083 | if (!isa<SCEVConstant>(Op)) |
| 8084 | Factors.push_back(Op); |
| 8085 | |
| 8086 | return SE.getMulExpr(Factors); |
| 8087 | } |
| 8088 | |
| 8089 | return T; |
| 8090 | } |
| 8091 | |
| 8092 | /// Return the size of an element read or written by Inst. |
| 8093 | const SCEV *ScalarEvolution::getElementSize(Instruction *Inst) { |
| 8094 | Type *Ty; |
| 8095 | if (StoreInst *Store = dyn_cast<StoreInst>(Inst)) |
| 8096 | Ty = Store->getValueOperand()->getType(); |
| 8097 | else if (LoadInst *Load = dyn_cast<LoadInst>(Inst)) |
Tobias Grosser | 40ac100 | 2014-06-08 19:21:20 +0000 | [diff] [blame] | 8098 | Ty = Load->getType(); |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8099 | else |
| 8100 | return nullptr; |
| 8101 | |
| 8102 | Type *ETy = getEffectiveSCEVType(PointerType::getUnqual(Ty)); |
| 8103 | return getSizeOfExpr(ETy, Ty); |
| 8104 | } |
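// For example (a sketch assuming a target where double occupies 8 bytes): for
// "store double %v, double* %p" the value operand's type is double, so the
// returned SCEV is sizeof(double), i.e. the constant 8; instructions other than
// loads and stores yield nullptr.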
| 8105 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8106 | /// Second step of delinearization: compute the array dimensions Sizes from the |
| 8107 | /// set of Terms extracted from the memory access function of this SCEVAddRec. |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8108 | void ScalarEvolution::findArrayDimensions(SmallVectorImpl<const SCEV *> &Terms, |
| 8109 | SmallVectorImpl<const SCEV *> &Sizes, |
| 8110 | const SCEV *ElementSize) const { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8111 | |
Sebastian Pop | 5352408 | 2014-05-29 19:44:05 +0000 | [diff] [blame] | 8112 | if (Terms.empty() || !ElementSize) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8113 | return; |
| 8114 | |
| 8115 | // Early return when Terms do not contain parameters: we do not delinearize |
| 8116 | // non-parametric SCEVs. |
| 8117 | if (!containsParameters(Terms)) |
| 8118 | return; |
| 8119 | |
| 8120 | DEBUG({ |
| 8121 | dbgs() << "Terms:\n"; |
| 8122 | for (const SCEV *T : Terms) |
| 8123 | dbgs() << *T << "\n"; |
| 8124 | }); |
| 8125 | |
| 8126 | // Remove duplicates. |
| 8127 | std::sort(Terms.begin(), Terms.end()); |
| 8128 | Terms.erase(std::unique(Terms.begin(), Terms.end()), Terms.end()); |
| 8129 | |
| 8130 | // Put larger terms first. |
| 8131 | std::sort(Terms.begin(), Terms.end(), [](const SCEV *LHS, const SCEV *RHS) { |
| 8132 | return numberOfTerms(LHS) > numberOfTerms(RHS); |
| 8133 | }); |
| 8134 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8135 | ScalarEvolution &SE = *const_cast<ScalarEvolution *>(this); |
| 8136 | |
| 8137 | // Divide all terms by the element size. |
| 8138 | for (const SCEV *&Term : Terms) { |
| 8139 | const SCEV *Q, *R; |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 8140 | SCEVDivision::divide(SE, Term, ElementSize, &Q, &R); |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8141 | Term = Q; |
| 8142 | } |
| 8143 | |
| 8144 | SmallVector<const SCEV *, 4> NewTerms; |
| 8145 | |
| 8146 | // Remove constant factors. |
| 8147 | for (const SCEV *T : Terms) |
| 8148 | if (const SCEV *NewT = removeConstantFactors(SE, T)) |
| 8149 | NewTerms.push_back(NewT); |
| 8150 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8151 | DEBUG({ |
| 8152 | dbgs() << "Terms after sorting:\n"; |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8153 | for (const SCEV *T : NewTerms) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8154 | dbgs() << *T << "\n"; |
| 8155 | }); |
| 8156 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8157 | if (NewTerms.empty() || |
| 8158 | !findArrayDimensionsRec(SE, NewTerms, Sizes)) { |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8159 | Sizes.clear(); |
| 8160 | return; |
| 8161 | } |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8162 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8163 | // The last element to be pushed into Sizes is the size of an element. |
| 8164 | Sizes.push_back(ElementSize); |
| 8165 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8166 | DEBUG({ |
| 8167 | dbgs() << "Sizes:\n"; |
| 8168 | for (const SCEV *S : Sizes) |
| 8169 | dbgs() << *S << "\n"; |
| 8170 | }); |
| 8171 | } |
| 8172 | |
| 8173 | /// Third step of delinearization: compute the access functions for the |
| 8174 | /// Subscripts based on the dimensions in Sizes. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8175 | void ScalarEvolution::computeAccessFunctions( |
| 8176 | const SCEV *Expr, SmallVectorImpl<const SCEV *> &Subscripts, |
| 8177 | SmallVectorImpl<const SCEV *> &Sizes) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8178 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8179 | // Early exit in case this SCEV is not an affine multivariate function. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8180 | if (Sizes.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8181 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8182 | |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8183 | if (auto AR = dyn_cast<SCEVAddRecExpr>(Expr)) |
| 8184 | if (!AR->isAffine()) |
| 8185 | return; |
| 8186 | |
| 8187 | const SCEV *Res = Expr; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8188 | int Last = Sizes.size() - 1; |
| 8189 | for (int i = Last; i >= 0; i--) { |
| 8190 | const SCEV *Q, *R; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8191 | SCEVDivision::divide(*this, Res, Sizes[i], &Q, &R); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8192 | |
| 8193 | DEBUG({ |
| 8194 | dbgs() << "Res: " << *Res << "\n"; |
| 8195 | dbgs() << "Sizes[i]: " << *Sizes[i] << "\n"; |
| 8196 | dbgs() << "Res divided by Sizes[i]:\n"; |
| 8197 | dbgs() << "Quotient: " << *Q << "\n"; |
| 8198 | dbgs() << "Remainder: " << *R << "\n"; |
| 8199 | }); |
| 8200 | |
| 8201 | Res = Q; |
| 8202 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8203 | // Do not record the last subscript corresponding to the size of elements in |
| 8204 | // the array. |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8205 | if (i == Last) { |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8206 | |
| 8207 | // Bail out if the remainder is too complex. |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8208 | if (isa<SCEVAddRecExpr>(R)) { |
| 8209 | Subscripts.clear(); |
| 8210 | Sizes.clear(); |
| 8211 | return; |
| 8212 | } |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 8213 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8214 | continue; |
| 8215 | } |
| 8216 | |
| 8217 | // Record the access function for the current subscript. |
| 8218 | Subscripts.push_back(R); |
| 8219 | } |
| 8220 | |
| 8221 | // Also push Res, what remains after the last division, in last position: after |
| 8222 | // the reverse below it is the first subscript (the outermost dimension). |
| 8223 | Subscripts.push_back(Res); |
| 8224 | |
| 8225 | std::reverse(Subscripts.begin(), Subscripts.end()); |
| 8226 | |
| 8227 | DEBUG({ |
| 8228 | dbgs() << "Subscripts:\n"; |
| 8229 | for (const SCEV *S : Subscripts) |
| 8230 | dbgs() << *S << "\n"; |
| 8231 | }); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8232 | } |
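// Conceptual 2-D walk-through with made-up values: for Expr =
// {{0,+,(8 * %m)}<%for.i>,+,8}<%for.j> and Sizes = [%m, 8], the first division
// (by the element size 8) only rescales Expr and records no subscript; the
// second (by %m) yields quotient {0,+,1}<%for.i> and remainder
// {0,+,1}<%for.j>, so after the final push and the reverse the subscripts are
// [{0,+,1}<%for.i>, {0,+,1}<%for.j>], i.e. the i and j access functions.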
| 8233 | |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8234 | /// Splits the SCEV into two vectors of SCEVs representing the subscripts and |
| 8235 | /// sizes of an array access. Returns the remainder of the delinearization that |
Sebastian Pop | 7ee1472 | 2013-11-13 22:37:58 +0000 | [diff] [blame] | 8236 | /// is the offset start of the array. The SCEV->delinearize algorithm computes |
| 8237 | /// the multiples of SCEV coefficients: that is a pattern matching of sub |
| 8238 | /// expressions in the stride and base of a SCEV corresponding to the |
| 8239 | /// computation of a GCD (greatest common divisor) of base and stride. When |
| 8240 | /// SCEV->delinearize fails, it returns the SCEV unchanged. |
| 8241 | /// |
| 8242 | /// For example: when analyzing the memory access A[i][j][k] in this loop nest |
| 8243 | /// |
| 8244 | /// void foo(long n, long m, long o, double A[n][m][o]) { |
| 8245 | /// |
| 8246 | /// for (long i = 0; i < n; i++) |
| 8247 | /// for (long j = 0; j < m; j++) |
| 8248 | /// for (long k = 0; k < o; k++) |
| 8249 | /// A[i][j][k] = 1.0; |
| 8250 | /// } |
| 8251 | /// |
| 8252 | /// the delinearization input is the following AddRec SCEV: |
| 8253 | /// |
| 8254 | /// AddRec: {{{%A,+,(8 * %m * %o)}<%for.i>,+,(8 * %o)}<%for.j>,+,8}<%for.k> |
| 8255 | /// |
| 8256 | /// From this SCEV, we are able to say that the base offset of the access is %A |
| 8257 | /// because it appears as an offset that does not divide any of the strides in |
| 8258 | /// the loops: |
| 8259 | /// |
| 8260 | /// CHECK: Base offset: %A |
| 8261 | /// |
| 8262 | /// and then SCEV->delinearize determines the size of some of the dimensions of |
| 8263 | /// the array as these are the multiples by which the strides are happening: |
| 8264 | /// |
| 8265 | /// CHECK: ArrayDecl[UnknownSize][%m][%o] with elements of sizeof(double) bytes. |
| 8266 | /// |
| 8267 | /// Note that the outermost dimension remains of UnknownSize because there are |
| 8268 | /// no strides that would help identifying the size of the last dimension: when |
| 8269 | /// the array has been statically allocated, one could compute the size of that |
| 8270 | /// dimension by dividing the overall size of the array by the size of the known |
| 8271 | /// dimensions: %m * %o * 8. |
| 8272 | /// |
| 8273 | /// Finally delinearize provides the access functions for the array reference |
| 8274 | /// that does correspond to A[i][j][k] of the above C testcase: |
| 8275 | /// |
| 8276 | /// CHECK: ArrayRef[{0,+,1}<%for.i>][{0,+,1}<%for.j>][{0,+,1}<%for.k>] |
| 8277 | /// |
| 8278 | /// The testcases are checking the output of a function pass: |
| 8279 | /// DelinearizationPass that walks through all loads and stores of a function |
| 8280 | /// asking for the SCEV of the memory access with respect to all enclosing |
| 8281 | /// loops, calling SCEV->delinearize on that and printing the results. |
| 8282 | |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8283 | void ScalarEvolution::delinearize(const SCEV *Expr, |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8284 | SmallVectorImpl<const SCEV *> &Subscripts, |
| 8285 | SmallVectorImpl<const SCEV *> &Sizes, |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8286 | const SCEV *ElementSize) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8287 | // First step: collect parametric terms. |
| 8288 | SmallVector<const SCEV *, 4> Terms; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8289 | collectParametricTerms(Expr, Terms); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8290 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8291 | if (Terms.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8292 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8293 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8294 | // Second step: find subscript sizes. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8295 | findArrayDimensions(Terms, Sizes, ElementSize); |
Sebastian Pop | 7ee1472 | 2013-11-13 22:37:58 +0000 | [diff] [blame] | 8296 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8297 | if (Sizes.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8298 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8299 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8300 | // Third step: compute the access functions for each subscript. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8301 | computeAccessFunctions(Expr, Subscripts, Sizes); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8302 | |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 8303 | if (Subscripts.empty()) |
| 8304 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 8305 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8306 | DEBUG({ |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 8307 | dbgs() << "succeeded to delinearize " << *Expr << "\n"; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8308 | dbgs() << "ArrayDecl[UnknownSize]"; |
| 8309 | for (const SCEV *S : Sizes) |
| 8310 | dbgs() << "[" << *S << "]"; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8311 | |
Sebastian Pop | 444621a | 2014-05-09 22:45:02 +0000 | [diff] [blame] | 8312 | dbgs() << "\nArrayRef"; |
| 8313 | for (const SCEV *S : Subscripts) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 8314 | dbgs() << "[" << *S << "]"; |
| 8315 | dbgs() << "\n"; |
| 8316 | }); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 8317 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8318 | |
| 8319 | //===----------------------------------------------------------------------===// |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8320 | // SCEVCallbackVH Class Implementation |
| 8321 | //===----------------------------------------------------------------------===// |
| 8322 | |
Dan Gohman | d33a090 | 2009-05-19 19:22:47 +0000 | [diff] [blame] | 8323 | void ScalarEvolution::SCEVCallbackVH::deleted() { |
Dan Gohman | dd707af | 2009-07-13 22:20:53 +0000 | [diff] [blame] | 8324 | assert(SE && "SCEVCallbackVH called with a null ScalarEvolution!"); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8325 | if (PHINode *PN = dyn_cast<PHINode>(getValPtr())) |
| 8326 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 8327 | SE->ValueExprMap.erase(getValPtr()); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8328 | // this now dangles! |
| 8329 | } |
| 8330 | |
Dan Gohman | 7a06672 | 2010-07-28 01:09:07 +0000 | [diff] [blame] | 8331 | void ScalarEvolution::SCEVCallbackVH::allUsesReplacedWith(Value *V) { |
Dan Gohman | dd707af | 2009-07-13 22:20:53 +0000 | [diff] [blame] | 8332 | assert(SE && "SCEVCallbackVH called with a null ScalarEvolution!"); |
Eric Christopher | ef6d593 | 2010-07-29 01:25:38 +0000 | [diff] [blame] | 8333 | |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8334 | // Forget all the expressions associated with users of the old value, |
| 8335 | // so that future queries will recompute the expressions using the new |
| 8336 | // value. |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 8337 | Value *Old = getValPtr(); |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 8338 | SmallVector<User *, 16> Worklist(Old->user_begin(), Old->user_end()); |
Dan Gohman | f34f863 | 2009-07-14 14:34:04 +0000 | [diff] [blame] | 8339 | SmallPtrSet<User *, 8> Visited; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8340 | while (!Worklist.empty()) { |
| 8341 | User *U = Worklist.pop_back_val(); |
| 8342 | // Deleting the Old value will cause this to dangle. Postpone |
| 8343 | // that until everything else is done. |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 8344 | if (U == Old) |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8345 | continue; |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 8346 | if (!Visited.insert(U).second) |
Dan Gohman | f34f863 | 2009-07-14 14:34:04 +0000 | [diff] [blame] | 8347 | continue; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8348 | if (PHINode *PN = dyn_cast<PHINode>(U)) |
| 8349 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 8350 | SE->ValueExprMap.erase(U); |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 8351 | Worklist.insert(Worklist.end(), U->user_begin(), U->user_end()); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8352 | } |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 8353 | // Delete the Old value. |
| 8354 | if (PHINode *PN = dyn_cast<PHINode>(Old)) |
| 8355 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 8356 | SE->ValueExprMap.erase(Old); |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 8357 | // this now dangles! |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8358 | } |
| 8359 | |
Dan Gohman | d33a090 | 2009-05-19 19:22:47 +0000 | [diff] [blame] | 8360 | ScalarEvolution::SCEVCallbackVH::SCEVCallbackVH(Value *V, ScalarEvolution *se) |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8361 | : CallbackVH(V), SE(se) {} |
| 8362 | |
| 8363 | //===----------------------------------------------------------------------===// |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8364 | // ScalarEvolution Class Implementation |
| 8365 | //===----------------------------------------------------------------------===// |
| 8366 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8367 | ScalarEvolution::ScalarEvolution(Function &F, TargetLibraryInfo &TLI, |
| 8368 | AssumptionCache &AC, DominatorTree &DT, |
| 8369 | LoopInfo &LI) |
| 8370 | : F(F), TLI(TLI), AC(AC), DT(DT), LI(LI), |
| 8371 | CouldNotCompute(new SCEVCouldNotCompute()), |
| 8372 | WalkingBEDominatingConds(false), ValuesAtScopes(64), LoopDispositions(64), |
| 8373 | BlockDispositions(64), FirstUnknown(nullptr) {} |
| 8374 | |
| 8375 | ScalarEvolution::ScalarEvolution(ScalarEvolution &&Arg) |
| 8376 | : F(Arg.F), TLI(Arg.TLI), AC(Arg.AC), DT(Arg.DT), LI(Arg.LI), |
| 8377 | CouldNotCompute(std::move(Arg.CouldNotCompute)), |
| 8378 | ValueExprMap(std::move(Arg.ValueExprMap)), |
| 8379 | WalkingBEDominatingConds(false), |
| 8380 | BackedgeTakenCounts(std::move(Arg.BackedgeTakenCounts)), |
| 8381 | ConstantEvolutionLoopExitValue( |
| 8382 | std::move(Arg.ConstantEvolutionLoopExitValue)), |
| 8383 | ValuesAtScopes(std::move(Arg.ValuesAtScopes)), |
| 8384 | LoopDispositions(std::move(Arg.LoopDispositions)), |
| 8385 | BlockDispositions(std::move(Arg.BlockDispositions)), |
| 8386 | UnsignedRanges(std::move(Arg.UnsignedRanges)), |
| 8387 | SignedRanges(std::move(Arg.SignedRanges)), |
| 8388 | UniqueSCEVs(std::move(Arg.UniqueSCEVs)), |
| 8389 | SCEVAllocator(std::move(Arg.SCEVAllocator)), |
| 8390 | FirstUnknown(Arg.FirstUnknown) { |
| 8391 | Arg.FirstUnknown = nullptr; |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8392 | } |
| 8393 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8394 | ScalarEvolution::~ScalarEvolution() { |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 8395 | // Iterate through all the SCEVUnknown instances and call their |
| 8396 | // destructors, so that they release their references to their values. |
| 8397 | for (SCEVUnknown *U = FirstUnknown; U; U = U->Next) |
| 8398 | U->~SCEVUnknown(); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 8399 | FirstUnknown = nullptr; |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 8400 | |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 8401 | ValueExprMap.clear(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 8402 | |
| 8403 | // Free any extra memory created for ExitNotTakenInfo in the unlikely event |
| 8404 | // that a loop had multiple computable exits. |
| 8405 | for (DenseMap<const Loop*, BackedgeTakenInfo>::iterator I = |
| 8406 | BackedgeTakenCounts.begin(), E = BackedgeTakenCounts.end(); |
| 8407 | I != E; ++I) { |
| 8408 | I->second.clear(); |
| 8409 | } |
| 8410 | |
Andrew Trick | 7fa4e0f | 2012-05-19 00:48:25 +0000 | [diff] [blame] | 8411 | assert(PendingLoopPredicates.empty() && "isImpliedCond garbage"); |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 8412 | assert(!WalkingBEDominatingConds && "isLoopBackedgeGuardedByCond garbage!"); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 8413 | } |
| 8414 | |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8415 | bool ScalarEvolution::hasLoopInvariantBackedgeTakenCount(const Loop *L) { |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 8416 | return !isa<SCEVCouldNotCompute>(getBackedgeTakenCount(L)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8417 | } |
| 8418 | |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8419 | static void PrintLoopInfo(raw_ostream &OS, ScalarEvolution *SE, |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8420 | const Loop *L) { |
| 8421 | // Print all inner loops first |
| 8422 | for (Loop::iterator I = L->begin(), E = L->end(); I != E; ++I) |
| 8423 | PrintLoopInfo(OS, SE, *I); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8424 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8425 | OS << "Loop "; |
Chandler Carruth | d48cdbf | 2014-01-09 02:29:41 +0000 | [diff] [blame] | 8426 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8427 | OS << ": "; |
Chris Lattner | d72c3eb | 2004-04-18 22:14:10 +0000 | [diff] [blame] | 8428 | |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 8429 | SmallVector<BasicBlock *, 8> ExitBlocks; |
Chris Lattner | d72c3eb | 2004-04-18 22:14:10 +0000 | [diff] [blame] | 8430 | L->getExitBlocks(ExitBlocks); |
| 8431 | if (ExitBlocks.size() != 1) |
Nick Lewycky | d1200b0 | 2008-01-02 02:49:20 +0000 | [diff] [blame] | 8432 | OS << "<multiple exits> "; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8433 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 8434 | if (SE->hasLoopInvariantBackedgeTakenCount(L)) { |
| 8435 | OS << "backedge-taken count is " << *SE->getBackedgeTakenCount(L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8436 | } else { |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 8437 | OS << "Unpredictable backedge-taken count. "; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8438 | } |
| 8439 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8440 | OS << "\n" |
| 8441 | "Loop "; |
Chandler Carruth | d48cdbf | 2014-01-09 02:29:41 +0000 | [diff] [blame] | 8442 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8443 | OS << ": "; |
Dan Gohman | 6994293 | 2009-06-24 00:33:16 +0000 | [diff] [blame] | 8444 | |
| 8445 | if (!isa<SCEVCouldNotCompute>(SE->getMaxBackedgeTakenCount(L))) { |
| 8446 | OS << "max backedge-taken count is " << *SE->getMaxBackedgeTakenCount(L); |
| 8447 | } else { |
| 8448 | OS << "Unpredictable max backedge-taken count. "; |
| 8449 | } |
| 8450 | |
| 8451 | OS << "\n"; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8452 | } |
| 8453 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8454 | void ScalarEvolution::print(raw_ostream &OS) const { |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 8455 | // ScalarEvolution's implementation of the print method is to print |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8456 | // out SCEV values of all instructions that are interesting. Doing |
 | 8457 | // this potentially causes it to create new SCEV objects, which 
 | 8458 | // technically conflicts with the const qualifier. However, this isn't 
Dan Gohman | 028e615 | 2009-07-10 20:25:29 +0000 | [diff] [blame] | 8459 | // observable from outside the class, so casting away the const 
 | 8460 | // isn't dangerous. 
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 8461 | ScalarEvolution &SE = *const_cast<ScalarEvolution *>(this); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8462 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8463 | OS << "Classifying expressions for: "; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8464 | F.printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8465 | OS << "\n"; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8466 | for (inst_iterator I = inst_begin(F), E = inst_end(F); I != E; ++I) |
Dan Gohman | d18dc2c | 2010-05-03 17:03:23 +0000 | [diff] [blame] | 8467 | if (isSCEVable(I->getType()) && !isa<CmpInst>(*I)) { |
Dan Gohman | fda3c4a | 2009-07-13 23:03:05 +0000 | [diff] [blame] | 8468 | OS << *I << '\n'; |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 8469 | OS << " --> "; |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8470 | const SCEV *SV = SE.getSCEV(&*I); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8471 | SV->print(OS); |
Sanjoy Das | f257452 | 2015-03-09 21:43:39 +0000 | [diff] [blame] | 8472 | if (!isa<SCEVCouldNotCompute>(SV)) { |
| 8473 | OS << " U: "; |
| 8474 | SE.getUnsignedRange(SV).print(OS); |
| 8475 | OS << " S: "; |
| 8476 | SE.getSignedRange(SV).print(OS); |
| 8477 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8478 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8479 | const Loop *L = LI.getLoopFor((*I).getParent()); |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 8480 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8481 | const SCEV *AtUse = SE.getSCEVAtScope(SV, L); |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 8482 | if (AtUse != SV) { |
| 8483 | OS << " --> "; |
| 8484 | AtUse->print(OS); |
Sanjoy Das | f257452 | 2015-03-09 21:43:39 +0000 | [diff] [blame] | 8485 | if (!isa<SCEVCouldNotCompute>(AtUse)) { |
| 8486 | OS << " U: "; |
| 8487 | SE.getUnsignedRange(AtUse).print(OS); |
| 8488 | OS << " S: "; |
| 8489 | SE.getSignedRange(AtUse).print(OS); |
| 8490 | } |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 8491 | } |
| 8492 | |
| 8493 | if (L) { |
Dan Gohman | 94c468f | 2009-06-18 00:37:45 +0000 | [diff] [blame] | 8494 | OS << "\t\t" "Exits: "; |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8495 | const SCEV *ExitValue = SE.getSCEVAtScope(SV, L->getParentLoop()); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8496 | if (!SE.isLoopInvariant(ExitValue, L)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8497 | OS << "<<Unknown>>"; |
| 8498 | } else { |
| 8499 | OS << *ExitValue; |
| 8500 | } |
| 8501 | } |
| 8502 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8503 | OS << "\n"; |
| 8504 | } |
| 8505 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8506 | OS << "Determining loop execution counts for: "; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8507 | F.printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 8508 | OS << "\n"; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8509 | for (LoopInfo::iterator I = LI.begin(), E = LI.end(); I != E; ++I) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8510 | PrintLoopInfo(OS, &SE, *I); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8511 | } |
Dan Gohman | e20f824 | 2009-04-21 00:47:46 +0000 | [diff] [blame] | 8512 | |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8513 | ScalarEvolution::LoopDisposition |
| 8514 | ScalarEvolution::getLoopDisposition(const SCEV *S, const Loop *L) { |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8515 | auto &Values = LoopDispositions[S]; |
| 8516 | for (auto &V : Values) { |
| 8517 | if (V.getPointer() == L) |
| 8518 | return V.getInt(); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 8519 | } |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8520 | Values.emplace_back(L, LoopVariant); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8521 | LoopDisposition D = computeLoopDisposition(S, L); |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8522 | auto &Values2 = LoopDispositions[S]; |
| 8523 | for (auto &V : make_range(Values2.rbegin(), Values2.rend())) { |
| 8524 | if (V.getPointer() == L) { |
| 8525 | V.setInt(D); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 8526 | break; |
| 8527 | } |
| 8528 | } |
| 8529 | return D; |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8530 | } |
| 8531 | |
| 8532 | ScalarEvolution::LoopDisposition |
| 8533 | ScalarEvolution::computeLoopDisposition(const SCEV *S, const Loop *L) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 8534 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8535 | case scConstant: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8536 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8537 | case scTruncate: |
| 8538 | case scZeroExtend: |
| 8539 | case scSignExtend: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8540 | return getLoopDisposition(cast<SCEVCastExpr>(S)->getOperand(), L); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8541 | case scAddRecExpr: { |
| 8542 | const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(S); |
| 8543 | |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8544 | // If L is the addrec's loop, it's computable. |
| 8545 | if (AR->getLoop() == L) |
| 8546 | return LoopComputable; |
| 8547 | |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8548 | // Add recurrences are never invariant in the function-body (null loop). |
| 8549 | if (!L) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8550 | return LoopVariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8551 | |
| 8552 | // This recurrence is variant w.r.t. L if L contains AR's loop. |
| 8553 | if (L->contains(AR->getLoop())) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8554 | return LoopVariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8555 | |
| 8556 | // This recurrence is invariant w.r.t. L if AR's loop contains L. |
| 8557 | if (AR->getLoop()->contains(L)) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8558 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8559 | |
| 8560 | // This recurrence is variant w.r.t. L if any of its operands |
| 8561 | // are variant. |
| 8562 | for (SCEVAddRecExpr::op_iterator I = AR->op_begin(), E = AR->op_end(); |
| 8563 | I != E; ++I) |
| 8564 | if (!isLoopInvariant(*I, L)) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8565 | return LoopVariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8566 | |
| 8567 | // Otherwise it's loop-invariant. |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8568 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8569 | } |
| 8570 | case scAddExpr: |
| 8571 | case scMulExpr: |
| 8572 | case scUMaxExpr: |
| 8573 | case scSMaxExpr: { |
| 8574 | const SCEVNAryExpr *NAry = cast<SCEVNAryExpr>(S); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8575 | bool HasVarying = false; |
| 8576 | for (SCEVNAryExpr::op_iterator I = NAry->op_begin(), E = NAry->op_end(); |
| 8577 | I != E; ++I) { |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8578 | LoopDisposition D = getLoopDisposition(*I, L); |
| 8579 | if (D == LoopVariant) |
| 8580 | return LoopVariant; |
| 8581 | if (D == LoopComputable) |
| 8582 | HasVarying = true; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8583 | } |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8584 | return HasVarying ? LoopComputable : LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8585 | } |
| 8586 | case scUDivExpr: { |
| 8587 | const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(S); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8588 | LoopDisposition LD = getLoopDisposition(UDiv->getLHS(), L); |
| 8589 | if (LD == LoopVariant) |
| 8590 | return LoopVariant; |
| 8591 | LoopDisposition RD = getLoopDisposition(UDiv->getRHS(), L); |
| 8592 | if (RD == LoopVariant) |
| 8593 | return LoopVariant; |
| 8594 | return (LD == LoopInvariant && RD == LoopInvariant) ? |
| 8595 | LoopInvariant : LoopComputable; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8596 | } |
| 8597 | case scUnknown: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8598 | // All non-instruction values are loop invariant. All instructions are loop |
| 8599 | // invariant if they are not contained in the specified loop. |
| 8600 | // Instructions are never considered invariant in the function body |
| 8601 | // (null loop) because they are defined within the "loop". |
| 8602 | if (Instruction *I = dyn_cast<Instruction>(cast<SCEVUnknown>(S)->getValue())) |
| 8603 | return (L && !L->contains(I)) ? LoopInvariant : LoopVariant; |
| 8604 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8605 | case scCouldNotCompute: |
| 8606 | llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8607 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 8608 | llvm_unreachable("Unknown SCEV kind!"); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 8609 | } |
| 8610 | |
| 8611 | bool ScalarEvolution::isLoopInvariant(const SCEV *S, const Loop *L) { |
| 8612 | return getLoopDisposition(S, L) == LoopInvariant; |
| 8613 | } |
| 8614 | |
| 8615 | bool ScalarEvolution::hasComputableLoopEvolution(const SCEV *S, const Loop *L) { |
| 8616 | return getLoopDisposition(S, L) == LoopComputable; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 8617 | } |
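// A minimal sketch (hypothetical client code; the helper name and the branch
// bodies are assumptions) of how passes typically consume the two wrappers
// above rather than calling getLoopDisposition directly:
//
//   void sketchClassify(ScalarEvolution &SE, const SCEV *S, const Loop *L) {
//     if (SE.isLoopInvariant(S, L)) {
//       // S evaluates to a single value for the entire execution of L and
//       // may, for example, be expanded outside of L.
//     } else if (SE.hasComputableLoopEvolution(S, L)) {
//       // S varies in L, but predictably (e.g. an AddRec over L), so
//       // trip-count and exit-value reasoning still apply.
//     } else {
//       // LoopVariant: nothing useful is known about S with respect to L.
//     }
//   }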
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8618 | |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8619 | ScalarEvolution::BlockDisposition |
| 8620 | ScalarEvolution::getBlockDisposition(const SCEV *S, const BasicBlock *BB) { |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8621 | auto &Values = BlockDispositions[S]; |
| 8622 | for (auto &V : Values) { |
| 8623 | if (V.getPointer() == BB) |
| 8624 | return V.getInt(); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 8625 | } |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8626 | Values.emplace_back(BB, DoesNotDominateBlock); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8627 | BlockDisposition D = computeBlockDisposition(S, BB); |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 8628 | auto &Values2 = BlockDispositions[S]; |
| 8629 | for (auto &V : make_range(Values2.rbegin(), Values2.rend())) { |
| 8630 | if (V.getPointer() == BB) { |
| 8631 | V.setInt(D); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 8632 | break; |
| 8633 | } |
| 8634 | } |
| 8635 | return D; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8636 | } |
| 8637 | |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8638 | ScalarEvolution::BlockDisposition |
| 8639 | ScalarEvolution::computeBlockDisposition(const SCEV *S, const BasicBlock *BB) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 8640 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8641 | case scConstant: |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8642 | return ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8643 | case scTruncate: |
| 8644 | case scZeroExtend: |
| 8645 | case scSignExtend: |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8646 | return getBlockDisposition(cast<SCEVCastExpr>(S)->getOperand(), BB); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8647 | case scAddRecExpr: { |
 | 8648 | // This uses a "dominates" query instead of a "properly dominates" query 
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8649 | // to test for proper dominance too, because the instruction which |
| 8650 | // produces the addrec's value is a PHI, and a PHI effectively properly |
| 8651 | // dominates its entire containing block. |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8652 | const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(S); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8653 | if (!DT.dominates(AR->getLoop()->getHeader(), BB)) |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8654 | return DoesNotDominateBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8655 | } |
| 8656 | // FALL THROUGH into SCEVNAryExpr handling. |
| 8657 | case scAddExpr: |
| 8658 | case scMulExpr: |
| 8659 | case scUMaxExpr: |
| 8660 | case scSMaxExpr: { |
| 8661 | const SCEVNAryExpr *NAry = cast<SCEVNAryExpr>(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8662 | bool Proper = true; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8663 | for (SCEVNAryExpr::op_iterator I = NAry->op_begin(), E = NAry->op_end(); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8664 | I != E; ++I) { |
| 8665 | BlockDisposition D = getBlockDisposition(*I, BB); |
| 8666 | if (D == DoesNotDominateBlock) |
| 8667 | return DoesNotDominateBlock; |
| 8668 | if (D == DominatesBlock) |
| 8669 | Proper = false; |
| 8670 | } |
| 8671 | return Proper ? ProperlyDominatesBlock : DominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8672 | } |
| 8673 | case scUDivExpr: { |
| 8674 | const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8675 | const SCEV *LHS = UDiv->getLHS(), *RHS = UDiv->getRHS(); |
| 8676 | BlockDisposition LD = getBlockDisposition(LHS, BB); |
| 8677 | if (LD == DoesNotDominateBlock) |
| 8678 | return DoesNotDominateBlock; |
| 8679 | BlockDisposition RD = getBlockDisposition(RHS, BB); |
| 8680 | if (RD == DoesNotDominateBlock) |
| 8681 | return DoesNotDominateBlock; |
| 8682 | return (LD == ProperlyDominatesBlock && RD == ProperlyDominatesBlock) ? |
| 8683 | ProperlyDominatesBlock : DominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8684 | } |
| 8685 | case scUnknown: |
| 8686 | if (Instruction *I = |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8687 | dyn_cast<Instruction>(cast<SCEVUnknown>(S)->getValue())) { |
| 8688 | if (I->getParent() == BB) |
| 8689 | return DominatesBlock; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8690 | if (DT.properlyDominates(I->getParent(), BB)) |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8691 | return ProperlyDominatesBlock; |
| 8692 | return DoesNotDominateBlock; |
| 8693 | } |
| 8694 | return ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8695 | case scCouldNotCompute: |
| 8696 | llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8697 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 8698 | llvm_unreachable("Unknown SCEV kind!"); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8699 | } |
| 8700 | |
| 8701 | bool ScalarEvolution::dominates(const SCEV *S, const BasicBlock *BB) { |
| 8702 | return getBlockDisposition(S, BB) >= DominatesBlock; |
| 8703 | } |
| 8704 | |
| 8705 | bool ScalarEvolution::properlyDominates(const SCEV *S, const BasicBlock *BB) { |
| 8706 | return getBlockDisposition(S, BB) == ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8707 | } |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 8708 | |
Andrew Trick | 365e31c | 2012-07-13 23:33:03 +0000 | [diff] [blame] | 8709 | namespace { |
| 8710 | // Search for a SCEV expression node within an expression tree. |
| 8711 | // Implements SCEVTraversal::Visitor. |
| 8712 | struct SCEVSearch { |
| 8713 | const SCEV *Node; |
| 8714 | bool IsFound; |
| 8715 | |
| 8716 | SCEVSearch(const SCEV *N): Node(N), IsFound(false) {} |
| 8717 | |
| 8718 | bool follow(const SCEV *S) { |
| 8719 | IsFound |= (S == Node); |
| 8720 | return !IsFound; |
| 8721 | } |
| 8722 | bool isDone() const { return IsFound; } |
| 8723 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 8724 | } |
Andrew Trick | 365e31c | 2012-07-13 23:33:03 +0000 | [diff] [blame] | 8725 | |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 8726 | bool ScalarEvolution::hasOperand(const SCEV *S, const SCEV *Op) const { |
Andrew Trick | 365e31c | 2012-07-13 23:33:03 +0000 | [diff] [blame] | 8727 | SCEVSearch Search(Op); |
| 8728 | visitAll(S, Search); |
| 8729 | return Search.IsFound; |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 8730 | } |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 8731 | |
| 8732 | void ScalarEvolution::forgetMemoizedResults(const SCEV *S) { |
| 8733 | ValuesAtScopes.erase(S); |
| 8734 | LoopDispositions.erase(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 8735 | BlockDispositions.erase(S); |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 8736 | UnsignedRanges.erase(S); |
| 8737 | SignedRanges.erase(S); |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 8738 | |
| 8739 | for (DenseMap<const Loop*, BackedgeTakenInfo>::iterator I = |
| 8740 | BackedgeTakenCounts.begin(), E = BackedgeTakenCounts.end(); I != E; ) { |
| 8741 | BackedgeTakenInfo &BEInfo = I->second; |
| 8742 | if (BEInfo.hasOperand(S, this)) { |
| 8743 | BEInfo.clear(); |
| 8744 | BackedgeTakenCounts.erase(I++); |
| 8745 | } |
| 8746 | else |
| 8747 | ++I; |
| 8748 | } |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 8749 | } |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8750 | |
| 8751 | typedef DenseMap<const Loop *, std::string> VerifyMap; |
Benjamin Kramer | 24d270d | 2012-10-27 10:45:01 +0000 | [diff] [blame] | 8752 | |
Alp Toker | cb40291 | 2014-01-24 17:20:08 +0000 | [diff] [blame] | 8753 | /// replaceSubString - Replaces all occurrences of From in Str with To. |
Benjamin Kramer | 24d270d | 2012-10-27 10:45:01 +0000 | [diff] [blame] | 8754 | static void replaceSubString(std::string &Str, StringRef From, StringRef To) { |
| 8755 | size_t Pos = 0; |
| 8756 | while ((Pos = Str.find(From, Pos)) != std::string::npos) { |
| 8757 | Str.replace(Pos, From.size(), To.data(), To.size()); |
| 8758 | Pos += To.size(); |
| 8759 | } |
| 8760 | } |
| 8761 | |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8762 | /// getLoopBackedgeTakenCounts - Helper for ScalarEvolution::verify. 
| 8763 | static void |
| 8764 | getLoopBackedgeTakenCounts(Loop *L, VerifyMap &Map, ScalarEvolution &SE) { |
| 8765 | for (Loop::reverse_iterator I = L->rbegin(), E = L->rend(); I != E; ++I) { |
| 8766 | getLoopBackedgeTakenCounts(*I, Map, SE); // recurse. |
| 8767 | |
| 8768 | std::string &S = Map[L]; |
| 8769 | if (S.empty()) { |
| 8770 | raw_string_ostream OS(S); |
| 8771 | SE.getBackedgeTakenCount(L)->print(OS); |
Benjamin Kramer | 24d270d | 2012-10-27 10:45:01 +0000 | [diff] [blame] | 8772 | |
| 8773 | // false and 0 are semantically equivalent. This can happen in dead loops. |
| 8774 | replaceSubString(OS.str(), "false", "0"); |
 | 8775 | // Remove wrap flags; their use in SCEV is highly fragile. 
| 8776 | // FIXME: Remove this when SCEV gets smarter about them. |
| 8777 | replaceSubString(OS.str(), "<nw>", ""); |
| 8778 | replaceSubString(OS.str(), "<nsw>", ""); |
| 8779 | replaceSubString(OS.str(), "<nuw>", ""); |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8780 | } |
| 8781 | } |
| 8782 | } |
| 8783 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8784 | void ScalarEvolution::verify() const { |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8785 | ScalarEvolution &SE = *const_cast<ScalarEvolution *>(this); |
| 8786 | |
| 8787 | // Gather stringified backedge taken counts for all loops using SCEV's caches. |
| 8788 | // FIXME: It would be much better to store actual values instead of strings, |
| 8789 | // but SCEV pointers will change if we drop the caches. |
| 8790 | VerifyMap BackedgeDumpsOld, BackedgeDumpsNew; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8791 | for (LoopInfo::reverse_iterator I = LI.rbegin(), E = LI.rend(); I != E; ++I) |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8792 | getLoopBackedgeTakenCounts(*I, BackedgeDumpsOld, SE); |
| 8793 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8794 | // Gather stringified backedge taken counts for all loops using a fresh |
| 8795 | // ScalarEvolution object. |
| 8796 | ScalarEvolution SE2(F, TLI, AC, DT, LI); |
| 8797 | for (LoopInfo::reverse_iterator I = LI.rbegin(), E = LI.rend(); I != E; ++I) |
| 8798 | getLoopBackedgeTakenCounts(*I, BackedgeDumpsNew, SE2); |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8799 | |
| 8800 | // Now compare whether they're the same with and without caches. This allows |
| 8801 | // verifying that no pass changed the cache. |
| 8802 | assert(BackedgeDumpsOld.size() == BackedgeDumpsNew.size() && |
| 8803 | "New loops suddenly appeared!"); |
| 8804 | |
| 8805 | for (VerifyMap::iterator OldI = BackedgeDumpsOld.begin(), |
| 8806 | OldE = BackedgeDumpsOld.end(), |
| 8807 | NewI = BackedgeDumpsNew.begin(); |
| 8808 | OldI != OldE; ++OldI, ++NewI) { |
| 8809 | assert(OldI->first == NewI->first && "Loop order changed!"); |
| 8810 | |
 | 8811 | // Compare the stringified SCEVs. We don't care if an undef backedge-taken 
 | 8812 | // count changes. 
Benjamin Kramer | 5bc077a | 2012-10-27 11:36:07 +0000 | [diff] [blame] | 8813 | // FIXME: We currently ignore SCEV changes from/to CouldNotCompute. Such a 
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8814 | // change means that a pass is buggy or that SCEV has to learn a new 
 | 8815 | // pattern, but it is usually not harmful. 
| 8816 | if (OldI->second != NewI->second && |
| 8817 | OldI->second.find("undef") == std::string::npos && |
Benjamin Kramer | 5bc077a | 2012-10-27 11:36:07 +0000 | [diff] [blame] | 8818 | NewI->second.find("undef") == std::string::npos && |
| 8819 | OldI->second != "***COULDNOTCOMPUTE***" && |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8820 | NewI->second != "***COULDNOTCOMPUTE***") { |
Benjamin Kramer | 5bc077a | 2012-10-27 11:36:07 +0000 | [diff] [blame] | 8821 | dbgs() << "SCEVValidator: SCEV for loop '" |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8822 | << OldI->first->getHeader()->getName() |
Benjamin Kramer | 5bc077a | 2012-10-27 11:36:07 +0000 | [diff] [blame] | 8823 | << "' changed from '" << OldI->second |
| 8824 | << "' to '" << NewI->second << "'!\n"; |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 8825 | std::abort(); |
| 8826 | } |
| 8827 | } |
| 8828 | |
| 8829 | // TODO: Verify more things. |
| 8830 | } |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8831 | |
| 8832 | char ScalarEvolutionAnalysis::PassID; |
| 8833 | |
| 8834 | ScalarEvolution ScalarEvolutionAnalysis::run(Function &F, |
| 8835 | AnalysisManager<Function> *AM) { |
| 8836 | return ScalarEvolution(F, AM->getResult<TargetLibraryAnalysis>(F), |
| 8837 | AM->getResult<AssumptionAnalysis>(F), |
| 8838 | AM->getResult<DominatorTreeAnalysis>(F), |
| 8839 | AM->getResult<LoopAnalysis>(F)); |
| 8840 | } |
| 8841 | |
| 8842 | PreservedAnalyses |
| 8843 | ScalarEvolutionPrinterPass::run(Function &F, AnalysisManager<Function> *AM) { |
| 8844 | AM->getResult<ScalarEvolutionAnalysis>(F).print(OS); |
| 8845 | return PreservedAnalyses::all(); |
| 8846 | } |
| 8847 | |
| 8848 | INITIALIZE_PASS_BEGIN(ScalarEvolutionWrapperPass, "scalar-evolution", |
| 8849 | "Scalar Evolution Analysis", false, true) |
| 8850 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) |
| 8851 | INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) |
| 8852 | INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass) |
| 8853 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) |
| 8854 | INITIALIZE_PASS_END(ScalarEvolutionWrapperPass, "scalar-evolution", |
| 8855 | "Scalar Evolution Analysis", false, true) |
| 8856 | char ScalarEvolutionWrapperPass::ID = 0; |
| 8857 | |
| 8858 | ScalarEvolutionWrapperPass::ScalarEvolutionWrapperPass() : FunctionPass(ID) { |
| 8859 | initializeScalarEvolutionWrapperPassPass(*PassRegistry::getPassRegistry()); |
| 8860 | } |
| 8861 | |
| 8862 | bool ScalarEvolutionWrapperPass::runOnFunction(Function &F) { |
| 8863 | SE.reset(new ScalarEvolution( |
| 8864 | F, getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(), |
| 8865 | getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F), |
| 8866 | getAnalysis<DominatorTreeWrapperPass>().getDomTree(), |
| 8867 | getAnalysis<LoopInfoWrapperPass>().getLoopInfo())); |
| 8868 | return false; |
| 8869 | } |
| 8870 | |
| 8871 | void ScalarEvolutionWrapperPass::releaseMemory() { SE.reset(); } |
| 8872 | |
| 8873 | void ScalarEvolutionWrapperPass::print(raw_ostream &OS, const Module *) const { |
| 8874 | SE->print(OS); |
| 8875 | } |
| 8876 | |
| 8877 | void ScalarEvolutionWrapperPass::verifyAnalysis() const { |
| 8878 | if (!VerifySCEV) |
| 8879 | return; |
| 8880 | |
| 8881 | SE->verify(); |
| 8882 | } |
| 8883 | |
| 8884 | void ScalarEvolutionWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const { |
| 8885 | AU.setPreservesAll(); |
| 8886 | AU.addRequiredTransitive<AssumptionCacheTracker>(); |
| 8887 | AU.addRequiredTransitive<LoopInfoWrapperPass>(); |
| 8888 | AU.addRequiredTransitive<DominatorTreeWrapperPass>(); |
| 8889 | AU.addRequiredTransitive<TargetLibraryInfoWrapperPass>(); |
| 8890 | } |