//===- ScalarEvolution.cpp - Scalar Evolution Analysis --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation of the scalar evolution analysis
// engine, which is used primarily to analyze expressions involving induction
// variables in loops.
//
// There are several aspects to this library. First is the representation of
// scalar expressions, which are represented as subclasses of the SCEV class.
// These classes are used to represent certain types of subexpressions that we
// can handle. We only create one SCEV of a particular shape, so
// pointer-comparisons for equality are legal.
//
// One important aspect of the SCEV objects is that they are never cyclic, even
// if there is a cycle in the dataflow for an expression (i.e., a PHI node). If
// the PHI node is one of the idioms that we can represent (e.g., a polynomial
// recurrence) then we represent it directly as a recurrence node, otherwise we
// represent it as a SCEVUnknown node.
//
// In addition to being able to represent expressions of various types, we also
// have folders that are used to build the *canonical* representation for a
// particular expression. These folders are capable of using a variety of
// rewrite rules to simplify the expressions.
//
// Once the folders are defined, we can implement the more interesting
// higher-level code, such as the code that recognizes PHI nodes of various
// types, computes the execution count of a loop, etc.
//
// TODO: We should use these routines and value representations to implement
// dependence analysis!
//
//===----------------------------------------------------------------------===//
//
// There are several good references for the techniques used in this analysis.
//
//  Chains of recurrences -- a method to expedite the evaluation
//  of closed-form functions
//  Olaf Bachmann, Paul S. Wang, Eugene V. Zima
//
//  On computational properties of chains of recurrences
//  Eugene V. Zima
//
//  Symbolic Evaluation of Chains of Recurrences for Loop Optimization
//  Robert A. van Engelen
//
//  Efficient Symbolic Analysis for Optimizing Compilers
//  Robert A. van Engelen
//
//  Using the chains of recurrences algebra for data dependence testing and
//  induction variable substitution
//  MS Thesis, Johnie Birch
//
//===----------------------------------------------------------------------===//
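
// A brief, informal note on notation (see SCEV::print below for the exact
// format): an add recurrence (SCEVAddRecExpr) is printed as
// {Start,+,Step}<%loop>, where %loop names the loop's header block and wrap
// flags such as <nuw> or <nsw> may appear before it.  For example, an
// induction variable that starts at 0 and is incremented by 1 each iteration
// is {0,+,1}<%loop>, and the in-loop value 4*i + 7 derived from it is
// {7,+,4}<%loop>.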

#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/EquivalenceClasses.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/ScopeExit.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/KnownBits.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <climits>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <map>
#include <memory>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "scalar-evolution"

STATISTIC(NumArrayLenItCounts,
          "Number of trip counts computed with array length");
STATISTIC(NumTripCountsComputed,
          "Number of loops with predictable loop counts");
STATISTIC(NumTripCountsNotComputed,
          "Number of loops without predictable loop counts");
STATISTIC(NumBruteForceTripCountsComputed,
          "Number of loops with trip counts computed by force");

static cl::opt<unsigned>
MaxBruteForceIterations("scalar-evolution-max-iterations", cl::ReallyHidden,
                        cl::desc("Maximum number of iterations SCEV will "
                                 "symbolically execute a constant "
                                 "derived loop"),
                        cl::init(100));

// FIXME: Enable this with EXPENSIVE_CHECKS when the test suite is clean.
static cl::opt<bool> VerifySCEV(
    "verify-scev", cl::Hidden,
    cl::desc("Verify ScalarEvolution's backedge taken counts (slow)"));
static cl::opt<bool>
    VerifySCEVMap("verify-scev-maps", cl::Hidden,
                  cl::desc("Verify no dangling value in ScalarEvolution's "
                           "ExprValueMap (slow)"));

static cl::opt<unsigned> MulOpsInlineThreshold(
    "scev-mulops-inline-threshold", cl::Hidden,
    cl::desc("Threshold for inlining multiplication operands into a SCEV"),
    cl::init(32));

static cl::opt<unsigned> AddOpsInlineThreshold(
    "scev-addops-inline-threshold", cl::Hidden,
    cl::desc("Threshold for inlining addition operands into a SCEV"),
    cl::init(500));

static cl::opt<unsigned> MaxSCEVCompareDepth(
    "scalar-evolution-max-scev-compare-depth", cl::Hidden,
    cl::desc("Maximum depth of recursive SCEV complexity comparisons"),
    cl::init(32));

static cl::opt<unsigned> MaxSCEVOperationsImplicationDepth(
    "scalar-evolution-max-scev-operations-implication-depth", cl::Hidden,
    cl::desc("Maximum depth of recursive SCEV operations implication analysis"),
    cl::init(2));

static cl::opt<unsigned> MaxValueCompareDepth(
    "scalar-evolution-max-value-compare-depth", cl::Hidden,
    cl::desc("Maximum depth of recursive value complexity comparisons"),
    cl::init(2));

static cl::opt<unsigned>
    MaxArithDepth("scalar-evolution-max-arith-depth", cl::Hidden,
                  cl::desc("Maximum depth of recursive arithmetics"),
                  cl::init(32));

static cl::opt<unsigned> MaxConstantEvolvingDepth(
    "scalar-evolution-max-constant-evolving-depth", cl::Hidden,
    cl::desc("Maximum depth of recursive constant evolving"), cl::init(32));

static cl::opt<unsigned>
    MaxExtDepth("scalar-evolution-max-ext-depth", cl::Hidden,
                cl::desc("Maximum depth of recursive SExt/ZExt"),
                cl::init(8));

static cl::opt<unsigned>
    MaxAddRecSize("scalar-evolution-max-add-rec-size", cl::Hidden,
                  cl::desc("Max coefficients in AddRec during evolving"),
                  cl::init(16));

//===----------------------------------------------------------------------===//
//                           SCEV class definitions
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Implementation of the SCEV class.
//

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void SCEV::dump() const {
  print(dbgs());
  dbgs() << '\n';
}
#endif

void SCEV::print(raw_ostream &OS) const {
  switch (static_cast<SCEVTypes>(getSCEVType())) {
  case scConstant:
    cast<SCEVConstant>(this)->getValue()->printAsOperand(OS, false);
    return;
  case scTruncate: {
    const SCEVTruncateExpr *Trunc = cast<SCEVTruncateExpr>(this);
    const SCEV *Op = Trunc->getOperand();
    OS << "(trunc " << *Op->getType() << " " << *Op << " to "
       << *Trunc->getType() << ")";
    return;
  }
  case scZeroExtend: {
    const SCEVZeroExtendExpr *ZExt = cast<SCEVZeroExtendExpr>(this);
    const SCEV *Op = ZExt->getOperand();
    OS << "(zext " << *Op->getType() << " " << *Op << " to "
       << *ZExt->getType() << ")";
    return;
  }
  case scSignExtend: {
    const SCEVSignExtendExpr *SExt = cast<SCEVSignExtendExpr>(this);
    const SCEV *Op = SExt->getOperand();
    OS << "(sext " << *Op->getType() << " " << *Op << " to "
       << *SExt->getType() << ")";
    return;
  }
  case scAddRecExpr: {
    const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(this);
    OS << "{" << *AR->getOperand(0);
    for (unsigned i = 1, e = AR->getNumOperands(); i != e; ++i)
      OS << ",+," << *AR->getOperand(i);
    OS << "}<";
    if (AR->hasNoUnsignedWrap())
      OS << "nuw><";
    if (AR->hasNoSignedWrap())
      OS << "nsw><";
    if (AR->hasNoSelfWrap() &&
        !AR->getNoWrapFlags((NoWrapFlags)(FlagNUW | FlagNSW)))
      OS << "nw><";
    AR->getLoop()->getHeader()->printAsOperand(OS, /*PrintType=*/false);
    OS << ">";
    return;
  }
  case scAddExpr:
  case scMulExpr:
  case scUMaxExpr:
  case scSMaxExpr: {
    const SCEVNAryExpr *NAry = cast<SCEVNAryExpr>(this);
    const char *OpStr = nullptr;
    switch (NAry->getSCEVType()) {
    case scAddExpr: OpStr = " + "; break;
    case scMulExpr: OpStr = " * "; break;
    case scUMaxExpr: OpStr = " umax "; break;
    case scSMaxExpr: OpStr = " smax "; break;
    }
    OS << "(";
    for (SCEVNAryExpr::op_iterator I = NAry->op_begin(), E = NAry->op_end();
         I != E; ++I) {
      OS << **I;
      if (std::next(I) != E)
        OS << OpStr;
    }
    OS << ")";
    switch (NAry->getSCEVType()) {
    case scAddExpr:
    case scMulExpr:
      if (NAry->hasNoUnsignedWrap())
        OS << "<nuw>";
      if (NAry->hasNoSignedWrap())
        OS << "<nsw>";
    }
    return;
  }
  case scUDivExpr: {
    const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(this);
    OS << "(" << *UDiv->getLHS() << " /u " << *UDiv->getRHS() << ")";
    return;
  }
  case scUnknown: {
    const SCEVUnknown *U = cast<SCEVUnknown>(this);
    Type *AllocTy;
    if (U->isSizeOf(AllocTy)) {
      OS << "sizeof(" << *AllocTy << ")";
      return;
    }
    if (U->isAlignOf(AllocTy)) {
      OS << "alignof(" << *AllocTy << ")";
      return;
    }

    Type *CTy;
    Constant *FieldNo;
    if (U->isOffsetOf(CTy, FieldNo)) {
      OS << "offsetof(" << *CTy << ", ";
      FieldNo->printAsOperand(OS, false);
      OS << ")";
      return;
    }

    // Otherwise just print it normally.
    U->getValue()->printAsOperand(OS, false);
    return;
  }
  case scCouldNotCompute:
    OS << "***COULDNOTCOMPUTE***";
    return;
  }
  llvm_unreachable("Unknown SCEV kind!");
}

Type *SCEV::getType() const {
  switch (static_cast<SCEVTypes>(getSCEVType())) {
  case scConstant:
    return cast<SCEVConstant>(this)->getType();
  case scTruncate:
  case scZeroExtend:
  case scSignExtend:
    return cast<SCEVCastExpr>(this)->getType();
  case scAddRecExpr:
  case scMulExpr:
  case scUMaxExpr:
  case scSMaxExpr:
    return cast<SCEVNAryExpr>(this)->getType();
  case scAddExpr:
    return cast<SCEVAddExpr>(this)->getType();
  case scUDivExpr:
    return cast<SCEVUDivExpr>(this)->getType();
  case scUnknown:
    return cast<SCEVUnknown>(this)->getType();
  case scCouldNotCompute:
    llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
  }
  llvm_unreachable("Unknown SCEV kind!");
}

bool SCEV::isZero() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isZero();
  return false;
}

bool SCEV::isOne() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isOne();
  return false;
}

bool SCEV::isAllOnesValue() const {
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(this))
    return SC->getValue()->isMinusOne();
  return false;
}

bool SCEV::isNonConstantNegative() const {
  const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(this);
  if (!Mul) return false;

  // If there is a constant factor, it will be first.
  const SCEVConstant *SC = dyn_cast<SCEVConstant>(Mul->getOperand(0));
  if (!SC) return false;

  // Return true if the value is negative, this matches things like (-42 * V).
  return SC->getAPInt().isNegative();
}

SCEVCouldNotCompute::SCEVCouldNotCompute() :
  SCEV(FoldingSetNodeIDRef(), scCouldNotCompute) {}

bool SCEVCouldNotCompute::classof(const SCEV *S) {
  return S->getSCEVType() == scCouldNotCompute;
}

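// getConstant below also illustrates the uniquing scheme used when building
// every kind of SCEV in this file: a FoldingSetNodeID is formed from the node
// kind and its operands, UniqueSCEVs is consulted first, and a new node is
// allocated from SCEVAllocator only on a miss.  This is what makes the
// pointer-equality comparisons promised in the file header legal.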
const SCEV *ScalarEvolution::getConstant(ConstantInt *V) {
  FoldingSetNodeID ID;
  ID.AddInteger(scConstant);
  ID.AddPointer(V);
  void *IP = nullptr;
  if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S;
  SCEV *S = new (SCEVAllocator) SCEVConstant(ID.Intern(SCEVAllocator), V);
  UniqueSCEVs.InsertNode(S, IP);
  return S;
}

const SCEV *ScalarEvolution::getConstant(const APInt &Val) {
  return getConstant(ConstantInt::get(getContext(), Val));
}

const SCEV *
ScalarEvolution::getConstant(Type *Ty, uint64_t V, bool isSigned) {
  IntegerType *ITy = cast<IntegerType>(getEffectiveSCEVType(Ty));
  return getConstant(ConstantInt::get(ITy, V, isSigned));
}

SCEVCastExpr::SCEVCastExpr(const FoldingSetNodeIDRef ID,
                           unsigned SCEVTy, const SCEV *op, Type *ty)
  : SCEV(ID, SCEVTy), Op(op), Ty(ty) {}

SCEVTruncateExpr::SCEVTruncateExpr(const FoldingSetNodeIDRef ID,
                                   const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scTruncate, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot truncate non-integer value!");
}

SCEVZeroExtendExpr::SCEVZeroExtendExpr(const FoldingSetNodeIDRef ID,
                                       const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scZeroExtend, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot zero extend non-integer value!");
}

SCEVSignExtendExpr::SCEVSignExtendExpr(const FoldingSetNodeIDRef ID,
                                       const SCEV *op, Type *ty)
  : SCEVCastExpr(ID, scSignExtend, op, ty) {
  assert((Op->getType()->isIntegerTy() || Op->getType()->isPointerTy()) &&
         (Ty->isIntegerTy() || Ty->isPointerTy()) &&
         "Cannot sign extend non-integer value!");
}

void SCEVUnknown::deleted() {
  // Clear this SCEVUnknown from various maps.
  SE->forgetMemoizedResults(this);

  // Remove this SCEVUnknown from the uniquing map.
  SE->UniqueSCEVs.RemoveNode(this);

  // Release the value.
  setValPtr(nullptr);
}

void SCEVUnknown::allUsesReplacedWith(Value *New) {
  // Remove this SCEVUnknown from the uniquing map.
  SE->UniqueSCEVs.RemoveNode(this);

  // Update this SCEVUnknown to point to the new value. This is needed
  // because there may still be outstanding SCEVs which still point to
  // this SCEVUnknown.
  setValPtr(New);
}

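// The next three predicates recognize the constant-expression idioms used to
// encode sizeof, alignof, and offsetof as a ptrtoint of a getelementptr from
// a null pointer.  Informal sketch of the matched forms (the precise operand
// checks are in the code below):
//   sizeof(Ty):            ptrtoint(getelementptr Ty* null, 1)
//   alignof(Ty):           ptrtoint(getelementptr {i1, Ty}* null, 0, 1)
//   offsetof(CTy, Field):  ptrtoint(getelementptr CTy* null, 0, Field)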
bool SCEVUnknown::isSizeOf(Type *&AllocTy) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getOperand(0)->isNullValue() &&
            CE->getNumOperands() == 2)
          if (ConstantInt *CI = dyn_cast<ConstantInt>(CE->getOperand(1)))
            if (CI->isOne()) {
              AllocTy = cast<PointerType>(CE->getOperand(0)->getType())
                            ->getElementType();
              return true;
            }

  return false;
}

bool SCEVUnknown::isAlignOf(Type *&AllocTy) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getOperand(0)->isNullValue()) {
          Type *Ty =
            cast<PointerType>(CE->getOperand(0)->getType())->getElementType();
          if (StructType *STy = dyn_cast<StructType>(Ty))
            if (!STy->isPacked() &&
                CE->getNumOperands() == 3 &&
                CE->getOperand(1)->isNullValue()) {
              if (ConstantInt *CI = dyn_cast<ConstantInt>(CE->getOperand(2)))
                if (CI->isOne() &&
                    STy->getNumElements() == 2 &&
                    STy->getElementType(0)->isIntegerTy(1)) {
                  AllocTy = STy->getElementType(1);
                  return true;
                }
            }
        }

  return false;
}

bool SCEVUnknown::isOffsetOf(Type *&CTy, Constant *&FieldNo) const {
  if (ConstantExpr *VCE = dyn_cast<ConstantExpr>(getValue()))
    if (VCE->getOpcode() == Instruction::PtrToInt)
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(VCE->getOperand(0)))
        if (CE->getOpcode() == Instruction::GetElementPtr &&
            CE->getNumOperands() == 3 &&
            CE->getOperand(0)->isNullValue() &&
            CE->getOperand(1)->isNullValue()) {
          Type *Ty =
            cast<PointerType>(CE->getOperand(0)->getType())->getElementType();
          // Ignore vector types here so that ScalarEvolutionExpander doesn't
          // emit getelementptrs that index into vectors.
          if (Ty->isStructTy() || Ty->isArrayTy()) {
            CTy = Ty;
            FieldNo = CE->getOperand(2);
            return true;
          }
        }

  return false;
}

//===----------------------------------------------------------------------===//
//                               SCEV Utilities
//===----------------------------------------------------------------------===//

/// Compare the two values \p LV and \p RV in terms of their "complexity" where
/// "complexity" is a partial (and somewhat ad-hoc) relation used to order
/// operands in SCEV expressions. \p EqCacheValue is a set of pairs of values
/// that have been previously deemed to be "equally complex" by this routine.
/// It is intended to avoid exponential time complexity in cases like:
///
///   %a = f(%x, %y)
///   %b = f(%a, %a)
///   %c = f(%b, %b)
///
///   %d = f(%x, %y)
///   %e = f(%d, %d)
///   %f = f(%e, %e)
///
///   CompareValueComplexity(%f, %c)
///
/// Since we do not continue running this routine on expression trees once we
/// have seen unequal values, there is no need to track them in the cache.
static int
CompareValueComplexity(EquivalenceClasses<const Value *> &EqCacheValue,
                       const LoopInfo *const LI, Value *LV, Value *RV,
                       unsigned Depth) {
  if (Depth > MaxValueCompareDepth || EqCacheValue.isEquivalent(LV, RV))
    return 0;

  // Order pointer values after integer values. This helps SCEVExpander form
  // GEPs.
  bool LIsPointer = LV->getType()->isPointerTy(),
       RIsPointer = RV->getType()->isPointerTy();
  if (LIsPointer != RIsPointer)
    return (int)LIsPointer - (int)RIsPointer;

  // Compare getValueID values.
  unsigned LID = LV->getValueID(), RID = RV->getValueID();
  if (LID != RID)
    return (int)LID - (int)RID;

  // Sort arguments by their position.
  if (const auto *LA = dyn_cast<Argument>(LV)) {
    const auto *RA = cast<Argument>(RV);
    unsigned LArgNo = LA->getArgNo(), RArgNo = RA->getArgNo();
    return (int)LArgNo - (int)RArgNo;
  }

  if (const auto *LGV = dyn_cast<GlobalValue>(LV)) {
    const auto *RGV = cast<GlobalValue>(RV);

    const auto IsGVNameSemantic = [&](const GlobalValue *GV) {
      auto LT = GV->getLinkage();
      return !(GlobalValue::isPrivateLinkage(LT) ||
               GlobalValue::isInternalLinkage(LT));
    };

    // Use the names to distinguish the two values, but only if the
    // names are semantically important.
    if (IsGVNameSemantic(LGV) && IsGVNameSemantic(RGV))
      return LGV->getName().compare(RGV->getName());
  }

  // For instructions, compare their loop depth, and their operand count. This
  // is pretty loose.
  if (const auto *LInst = dyn_cast<Instruction>(LV)) {
    const auto *RInst = cast<Instruction>(RV);

    // Compare loop depths.
    const BasicBlock *LParent = LInst->getParent(),
                     *RParent = RInst->getParent();
    if (LParent != RParent) {
      unsigned LDepth = LI->getLoopDepth(LParent),
               RDepth = LI->getLoopDepth(RParent);
      if (LDepth != RDepth)
        return (int)LDepth - (int)RDepth;
    }

    // Compare the number of operands.
    unsigned LNumOps = LInst->getNumOperands(),
             RNumOps = RInst->getNumOperands();
    if (LNumOps != RNumOps)
      return (int)LNumOps - (int)RNumOps;

    for (unsigned Idx : seq(0u, LNumOps)) {
      int Result =
          CompareValueComplexity(EqCacheValue, LI, LInst->getOperand(Idx),
                                 RInst->getOperand(Idx), Depth + 1);
      if (Result != 0)
        return Result;
    }
  }

  EqCacheValue.unionSets(LV, RV);
  return 0;
}

// Return negative, zero, or positive if LHS is less than, equal to, or greater
// than RHS, respectively. A three-way result allows recursive comparisons to be
// more efficient.
static int CompareSCEVComplexity(
    EquivalenceClasses<const SCEV *> &EqCacheSCEV,
    EquivalenceClasses<const Value *> &EqCacheValue,
    const LoopInfo *const LI, const SCEV *LHS, const SCEV *RHS,
    DominatorTree &DT, unsigned Depth = 0) {
  // Fast-path: SCEVs are uniqued so we can do a quick equality check.
  if (LHS == RHS)
    return 0;

  // Primarily, sort the SCEVs by their getSCEVType().
  unsigned LType = LHS->getSCEVType(), RType = RHS->getSCEVType();
  if (LType != RType)
    return (int)LType - (int)RType;

  if (Depth > MaxSCEVCompareDepth || EqCacheSCEV.isEquivalent(LHS, RHS))
    return 0;
  // Aside from the getSCEVType() ordering, the particular ordering
  // isn't very important except that it's beneficial to be consistent,
  // so that (a + b) and (b + a) don't end up as different expressions.
  switch (static_cast<SCEVTypes>(LType)) {
  case scUnknown: {
    const SCEVUnknown *LU = cast<SCEVUnknown>(LHS);
    const SCEVUnknown *RU = cast<SCEVUnknown>(RHS);

    int X = CompareValueComplexity(EqCacheValue, LI, LU->getValue(),
                                   RU->getValue(), Depth + 1);
    if (X == 0)
      EqCacheSCEV.unionSets(LHS, RHS);
    return X;
  }

  case scConstant: {
    const SCEVConstant *LC = cast<SCEVConstant>(LHS);
    const SCEVConstant *RC = cast<SCEVConstant>(RHS);

    // Compare constant values.
    const APInt &LA = LC->getAPInt();
    const APInt &RA = RC->getAPInt();
    unsigned LBitWidth = LA.getBitWidth(), RBitWidth = RA.getBitWidth();
    if (LBitWidth != RBitWidth)
      return (int)LBitWidth - (int)RBitWidth;
    return LA.ult(RA) ? -1 : 1;
  }

  case scAddRecExpr: {
    const SCEVAddRecExpr *LA = cast<SCEVAddRecExpr>(LHS);
    const SCEVAddRecExpr *RA = cast<SCEVAddRecExpr>(RHS);

    // There is always a dominance relationship between two recurrences that
    // are used by the same SCEV, so we can safely sort them by loop header
    // dominance. We require such an order in getAddExpr.
    const Loop *LLoop = LA->getLoop(), *RLoop = RA->getLoop();
    if (LLoop != RLoop) {
      const BasicBlock *LHead = LLoop->getHeader(), *RHead = RLoop->getHeader();
      assert(LHead != RHead && "Two loops share the same header?");
      if (DT.dominates(LHead, RHead))
        return 1;
      else
        assert(DT.dominates(RHead, LHead) &&
               "No dominance between recurrences used by one SCEV?");
      return -1;
    }

    // Addrec complexity grows with operand count.
    unsigned LNumOps = LA->getNumOperands(), RNumOps = RA->getNumOperands();
    if (LNumOps != RNumOps)
      return (int)LNumOps - (int)RNumOps;

    // Compare NoWrap flags.
    if (LA->getNoWrapFlags() != RA->getNoWrapFlags())
      return (int)LA->getNoWrapFlags() - (int)RA->getNoWrapFlags();

    // Lexicographically compare.
    for (unsigned i = 0; i != LNumOps; ++i) {
      int X = CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI,
                                    LA->getOperand(i), RA->getOperand(i), DT,
                                    Depth + 1);
      if (X != 0)
        return X;
    }
    EqCacheSCEV.unionSets(LHS, RHS);
    return 0;
  }

  case scAddExpr:
  case scMulExpr:
  case scSMaxExpr:
  case scUMaxExpr: {
    const SCEVNAryExpr *LC = cast<SCEVNAryExpr>(LHS);
    const SCEVNAryExpr *RC = cast<SCEVNAryExpr>(RHS);

    // Lexicographically compare n-ary expressions.
    unsigned LNumOps = LC->getNumOperands(), RNumOps = RC->getNumOperands();
    if (LNumOps != RNumOps)
      return (int)LNumOps - (int)RNumOps;

    // Compare NoWrap flags.
    if (LC->getNoWrapFlags() != RC->getNoWrapFlags())
      return (int)LC->getNoWrapFlags() - (int)RC->getNoWrapFlags();

    for (unsigned i = 0; i != LNumOps; ++i) {
      int X = CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI,
                                    LC->getOperand(i), RC->getOperand(i), DT,
                                    Depth + 1);
      if (X != 0)
        return X;
    }
    EqCacheSCEV.unionSets(LHS, RHS);
    return 0;
  }

  case scUDivExpr: {
    const SCEVUDivExpr *LC = cast<SCEVUDivExpr>(LHS);
    const SCEVUDivExpr *RC = cast<SCEVUDivExpr>(RHS);

    // Lexicographically compare udiv expressions.
    int X = CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI, LC->getLHS(),
                                  RC->getLHS(), DT, Depth + 1);
    if (X != 0)
      return X;
    X = CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI, LC->getRHS(),
                              RC->getRHS(), DT, Depth + 1);
    if (X == 0)
      EqCacheSCEV.unionSets(LHS, RHS);
    return X;
  }

  case scTruncate:
  case scZeroExtend:
  case scSignExtend: {
    const SCEVCastExpr *LC = cast<SCEVCastExpr>(LHS);
    const SCEVCastExpr *RC = cast<SCEVCastExpr>(RHS);

    // Compare cast expressions by operand.
    int X = CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI,
                                  LC->getOperand(), RC->getOperand(), DT,
                                  Depth + 1);
    if (X == 0)
      EqCacheSCEV.unionSets(LHS, RHS);
    return X;
  }

  case scCouldNotCompute:
    llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
  }
  llvm_unreachable("Unknown SCEV kind!");
}

/// Given a list of SCEV objects, order them by their complexity, and group
/// objects of the same complexity together by value. When this routine is
/// finished, we know that any duplicates in the vector are consecutive and that
/// complexity is monotonically increasing.
///
/// Note that we take special precautions to ensure that we get deterministic
/// results from this routine. In other words, we don't want the results of
/// this to depend on where the addresses of various SCEV objects happened to
/// land in memory.
static void GroupByComplexity(SmallVectorImpl<const SCEV *> &Ops,
                              LoopInfo *LI, DominatorTree &DT) {
  if (Ops.size() < 2) return;  // Noop

  EquivalenceClasses<const SCEV *> EqCacheSCEV;
  EquivalenceClasses<const Value *> EqCacheValue;
  if (Ops.size() == 2) {
    // This is the common case, which also happens to be trivially simple.
    // Special case it.
    const SCEV *&LHS = Ops[0], *&RHS = Ops[1];
    if (CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI, RHS, LHS, DT) < 0)
      std::swap(LHS, RHS);
    return;
  }

  // Do the rough sort by complexity.
  std::stable_sort(Ops.begin(), Ops.end(),
                   [&](const SCEV *LHS, const SCEV *RHS) {
                     return CompareSCEVComplexity(EqCacheSCEV, EqCacheValue, LI,
                                                  LHS, RHS, DT) < 0;
                   });

  // Now that we are sorted by complexity, group elements of the same
  // complexity. Note that this is, at worst, N^2, but the vector is likely to
  // be extremely short in practice. Note that we take this approach because we
  // do not want to depend on the addresses of the objects we are grouping.
  for (unsigned i = 0, e = Ops.size(); i != e-2; ++i) {
    const SCEV *S = Ops[i];
    unsigned Complexity = S->getSCEVType();

    // If there are any objects of the same complexity and same value as this
    // one, group them.
    for (unsigned j = i+1; j != e && Ops[j]->getSCEVType() == Complexity; ++j) {
      if (Ops[j] == S) { // Found a duplicate.
        // Move it to immediately after i'th element.
        std::swap(Ops[i+1], Ops[j]);
        ++i;   // no need to rescan it.
        if (i == e-2) return;  // Done!
      }
    }
  }
}
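
// As an informal illustration: given the operand list [%x, (%a + %b), 5, %x],
// the rough sort orders the SCEVs by kind (constants first), and the grouping
// pass then moves the duplicate %x next to its first occurrence, yielding
// something like [5, (%a + %b), %x, %x].  Callers such as getAddExpr rely on
// duplicates being adjacent in order to fold repeated operands in a single
// linear scan.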
| 828 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 829 | // Returns the size of the SCEV S. |
| 830 | static inline int sizeOfSCEV(const SCEV *S) { |
Sanjoy Das | 7d75267 | 2015-12-08 04:32:54 +0000 | [diff] [blame] | 831 | struct FindSCEVSize { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 832 | int Size = 0; |
| 833 | |
| 834 | FindSCEVSize() = default; |
Sanjoy Das | 7d75267 | 2015-12-08 04:32:54 +0000 | [diff] [blame] | 835 | |
| 836 | bool follow(const SCEV *S) { |
| 837 | ++Size; |
| 838 | // Keep looking at all operands of S. |
| 839 | return true; |
| 840 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 841 | |
Sanjoy Das | 7d75267 | 2015-12-08 04:32:54 +0000 | [diff] [blame] | 842 | bool isDone() const { |
| 843 | return false; |
| 844 | } |
| 845 | }; |
| 846 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 847 | FindSCEVSize F; |
| 848 | SCEVTraversal<FindSCEVSize> ST(F); |
| 849 | ST.visitAll(S); |
| 850 | return F.Size; |
| 851 | } |
| 852 | |
| 853 | namespace { |
| 854 | |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 855 | struct SCEVDivision : public SCEVVisitor<SCEVDivision, void> { |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 856 | public: |
| 857 | // Computes the Quotient and Remainder of the division of Numerator by |
| 858 | // Denominator. |
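  // For example, dividing (4 * %n + 1) by %n is expected to produce a
  // Quotient of 4 and a Remainder of 1, while dividing (4 * %n) by %n
  // produces a Quotient of 4 and a Remainder of 0.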
| 859 | static void divide(ScalarEvolution &SE, const SCEV *Numerator, |
| 860 | const SCEV *Denominator, const SCEV **Quotient, |
| 861 | const SCEV **Remainder) { |
| 862 | assert(Numerator && Denominator && "Uninitialized SCEV"); |
| 863 | |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 864 | SCEVDivision D(SE, Numerator, Denominator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 865 | |
| 866 | // Check for the trivial case here to avoid having to check for it in the |
| 867 | // rest of the code. |
| 868 | if (Numerator == Denominator) { |
| 869 | *Quotient = D.One; |
| 870 | *Remainder = D.Zero; |
| 871 | return; |
| 872 | } |
| 873 | |
| 874 | if (Numerator->isZero()) { |
| 875 | *Quotient = D.Zero; |
| 876 | *Remainder = D.Zero; |
| 877 | return; |
| 878 | } |
| 879 | |
      // Handle the simple case of N/1: the quotient is N and the remainder is 0.
| 881 | if (Denominator->isOne()) { |
| 882 | *Quotient = Numerator; |
| 883 | *Remainder = D.Zero; |
| 884 | return; |
| 885 | } |
| 886 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 887 | // Split the Denominator when it is a product. |
Sanjoy Das | b277a42 | 2016-06-15 06:53:55 +0000 | [diff] [blame] | 888 | if (const SCEVMulExpr *T = dyn_cast<SCEVMulExpr>(Denominator)) { |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 889 | const SCEV *Q, *R; |
| 890 | *Quotient = Numerator; |
| 891 | for (const SCEV *Op : T->operands()) { |
| 892 | divide(SE, *Quotient, Op, &Q, &R); |
| 893 | *Quotient = Q; |
| 894 | |
| 895 | // Bail out when the Numerator is not divisible by one of the terms of |
| 896 | // the Denominator. |
| 897 | if (!R->isZero()) { |
| 898 | *Quotient = D.Zero; |
| 899 | *Remainder = Numerator; |
| 900 | return; |
| 901 | } |
| 902 | } |
| 903 | *Remainder = D.Zero; |
| 904 | return; |
| 905 | } |
| 906 | |
| 907 | D.visit(Numerator); |
| 908 | *Quotient = D.Quotient; |
| 909 | *Remainder = D.Remainder; |
| 910 | } |
| 911 | |
  // Except in the trivial case described above, we do not know how to divide
  // Expr by Denominator; the visitors below therefore have empty
  // implementations.
| 914 | void visitTruncateExpr(const SCEVTruncateExpr *Numerator) {} |
| 915 | void visitZeroExtendExpr(const SCEVZeroExtendExpr *Numerator) {} |
| 916 | void visitSignExtendExpr(const SCEVSignExtendExpr *Numerator) {} |
| 917 | void visitUDivExpr(const SCEVUDivExpr *Numerator) {} |
| 918 | void visitSMaxExpr(const SCEVSMaxExpr *Numerator) {} |
| 919 | void visitUMaxExpr(const SCEVUMaxExpr *Numerator) {} |
| 920 | void visitUnknown(const SCEVUnknown *Numerator) {} |
| 921 | void visitCouldNotCompute(const SCEVCouldNotCompute *Numerator) {} |
| 922 | |
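  // Constants are divided with signed APInt division; for example, dividing
  // the constant 7 by the constant 3 gives a Quotient of 2 and a Remainder
  // of 1.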
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 923 | void visitConstant(const SCEVConstant *Numerator) { |
| 924 | if (const SCEVConstant *D = dyn_cast<SCEVConstant>(Denominator)) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 925 | APInt NumeratorVal = Numerator->getAPInt(); |
| 926 | APInt DenominatorVal = D->getAPInt(); |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 927 | uint32_t NumeratorBW = NumeratorVal.getBitWidth(); |
| 928 | uint32_t DenominatorBW = DenominatorVal.getBitWidth(); |
| 929 | |
| 930 | if (NumeratorBW > DenominatorBW) |
| 931 | DenominatorVal = DenominatorVal.sext(NumeratorBW); |
| 932 | else if (NumeratorBW < DenominatorBW) |
| 933 | NumeratorVal = NumeratorVal.sext(DenominatorBW); |
| 934 | |
| 935 | APInt QuotientVal(NumeratorVal.getBitWidth(), 0); |
| 936 | APInt RemainderVal(NumeratorVal.getBitWidth(), 0); |
| 937 | APInt::sdivrem(NumeratorVal, DenominatorVal, QuotientVal, RemainderVal); |
| 938 | Quotient = SE.getConstant(QuotientVal); |
| 939 | Remainder = SE.getConstant(RemainderVal); |
| 940 | return; |
| 941 | } |
| 942 | } |
| 943 | |
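  // An affine AddRec is divided by dividing its Start and Step separately;
  // for example, {0,+,6} divided by the constant 2 gives the Quotient
  // {0,+,3} and a Remainder of 0.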
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 944 | void visitAddRecExpr(const SCEVAddRecExpr *Numerator) { |
| 945 | const SCEV *StartQ, *StartR, *StepQ, *StepR; |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 946 | if (!Numerator->isAffine()) |
| 947 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 948 | divide(SE, Numerator->getStart(), Denominator, &StartQ, &StartR); |
| 949 | divide(SE, Numerator->getStepRecurrence(SE), Denominator, &StepQ, &StepR); |
Brendon Cahoon | f9751ad | 2015-04-22 15:06:40 +0000 | [diff] [blame] | 950 | // Bail out if the types do not match. |
| 951 | Type *Ty = Denominator->getType(); |
| 952 | if (Ty != StartQ->getType() || Ty != StartR->getType() || |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 953 | Ty != StepQ->getType() || Ty != StepR->getType()) |
| 954 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 955 | Quotient = SE.getAddRecExpr(StartQ, StepQ, Numerator->getLoop(), |
| 956 | Numerator->getNoWrapFlags()); |
| 957 | Remainder = SE.getAddRecExpr(StartR, StepR, Numerator->getLoop(), |
| 958 | Numerator->getNoWrapFlags()); |
| 959 | } |
| 960 | |
| 961 | void visitAddExpr(const SCEVAddExpr *Numerator) { |
| 962 | SmallVector<const SCEV *, 2> Qs, Rs; |
| 963 | Type *Ty = Denominator->getType(); |
| 964 | |
| 965 | for (const SCEV *Op : Numerator->operands()) { |
| 966 | const SCEV *Q, *R; |
| 967 | divide(SE, Op, Denominator, &Q, &R); |
| 968 | |
| 969 | // Bail out if types do not match. |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 970 | if (Ty != Q->getType() || Ty != R->getType()) |
| 971 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 972 | |
| 973 | Qs.push_back(Q); |
| 974 | Rs.push_back(R); |
| 975 | } |
| 976 | |
| 977 | if (Qs.size() == 1) { |
| 978 | Quotient = Qs[0]; |
| 979 | Remainder = Rs[0]; |
| 980 | return; |
| 981 | } |
| 982 | |
| 983 | Quotient = SE.getAddExpr(Qs); |
| 984 | Remainder = SE.getAddExpr(Rs); |
| 985 | } |
| 986 | |
| 987 | void visitMulExpr(const SCEVMulExpr *Numerator) { |
| 988 | SmallVector<const SCEV *, 2> Qs; |
| 989 | Type *Ty = Denominator->getType(); |
| 990 | |
| 991 | bool FoundDenominatorTerm = false; |
| 992 | for (const SCEV *Op : Numerator->operands()) { |
| 993 | // Bail out if types do not match. |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 994 | if (Ty != Op->getType()) |
| 995 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 996 | |
| 997 | if (FoundDenominatorTerm) { |
| 998 | Qs.push_back(Op); |
| 999 | continue; |
| 1000 | } |
| 1001 | |
| 1002 | // Check whether Denominator divides one of the product operands. |
| 1003 | const SCEV *Q, *R; |
| 1004 | divide(SE, Op, Denominator, &Q, &R); |
| 1005 | if (!R->isZero()) { |
| 1006 | Qs.push_back(Op); |
| 1007 | continue; |
| 1008 | } |
| 1009 | |
| 1010 | // Bail out if types do not match. |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 1011 | if (Ty != Q->getType()) |
| 1012 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1013 | |
| 1014 | FoundDenominatorTerm = true; |
| 1015 | Qs.push_back(Q); |
| 1016 | } |
| 1017 | |
| 1018 | if (FoundDenominatorTerm) { |
| 1019 | Remainder = Zero; |
| 1020 | if (Qs.size() == 1) |
| 1021 | Quotient = Qs[0]; |
| 1022 | else |
| 1023 | Quotient = SE.getMulExpr(Qs); |
| 1024 | return; |
| 1025 | } |
| 1026 | |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 1027 | if (!isa<SCEVUnknown>(Denominator)) |
| 1028 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1029 | |
| 1030 | // The Remainder is obtained by replacing Denominator by 0 in Numerator. |
| 1031 | ValueToValueMap RewriteMap; |
| 1032 | RewriteMap[cast<SCEVUnknown>(Denominator)->getValue()] = |
| 1033 | cast<SCEVConstant>(Zero)->getValue(); |
| 1034 | Remainder = SCEVParameterRewriter::rewrite(Numerator, SE, RewriteMap, true); |
| 1035 | |
| 1036 | if (Remainder->isZero()) { |
| 1037 | // The Quotient is obtained by replacing Denominator by 1 in Numerator. |
| 1038 | RewriteMap[cast<SCEVUnknown>(Denominator)->getValue()] = |
| 1039 | cast<SCEVConstant>(One)->getValue(); |
| 1040 | Quotient = |
| 1041 | SCEVParameterRewriter::rewrite(Numerator, SE, RewriteMap, true); |
| 1042 | return; |
| 1043 | } |
| 1044 | |
| 1045 | // Quotient is (Numerator - Remainder) divided by Denominator. |
| 1046 | const SCEV *Q, *R; |
| 1047 | const SCEV *Diff = SE.getMinusSCEV(Numerator, Remainder); |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 1048 | // This SCEV does not seem to simplify: fail the division here. |
| 1049 | if (sizeOfSCEV(Diff) > sizeOfSCEV(Numerator)) |
| 1050 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1051 | divide(SE, Diff, Denominator, &Q, &R); |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 1052 | if (R != Zero) |
| 1053 | return cannotDivide(Numerator); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1054 | Quotient = Q; |
| 1055 | } |
| 1056 | |
| 1057 | private: |
David Majnemer | 5d2670c | 2014-11-17 11:27:45 +0000 | [diff] [blame] | 1058 | SCEVDivision(ScalarEvolution &S, const SCEV *Numerator, |
| 1059 | const SCEV *Denominator) |
| 1060 | : SE(S), Denominator(Denominator) { |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 1061 | Zero = SE.getZero(Denominator->getType()); |
| 1062 | One = SE.getOne(Denominator->getType()); |
David Majnemer | 5d2670c | 2014-11-17 11:27:45 +0000 | [diff] [blame] | 1063 | |
Matthew Simpson | ddb4d97 | 2015-09-10 18:12:47 +0000 | [diff] [blame] | 1064 | // We generally do not know how to divide Expr by Denominator. We |
| 1065 | // initialize the division to a "cannot divide" state to simplify the rest |
| 1066 | // of the code. |
| 1067 | cannotDivide(Numerator); |
| 1068 | } |
| 1069 | |
| 1070 | // Convenience function for giving up on the division. We set the quotient to |
| 1071 | // be equal to zero and the remainder to be equal to the numerator. |
| 1072 | void cannotDivide(const SCEV *Numerator) { |
David Majnemer | 5d2670c | 2014-11-17 11:27:45 +0000 | [diff] [blame] | 1073 | Quotient = Zero; |
| 1074 | Remainder = Numerator; |
| 1075 | } |
| 1076 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1077 | ScalarEvolution &SE; |
| 1078 | const SCEV *Denominator, *Quotient, *Remainder, *Zero, *One; |
David Majnemer | 32b8ccf | 2014-11-16 20:35:19 +0000 | [diff] [blame] | 1079 | }; |
| 1080 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 1081 | } // end anonymous namespace |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 1082 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1083 | //===----------------------------------------------------------------------===// |
| 1084 | // Simple SCEV method implementations |
| 1085 | //===----------------------------------------------------------------------===// |
| 1086 | |
/// Compute BC(It, K). The result has width W. Assumes K > 0.
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1088 | static const SCEV *BinomialCoefficient(const SCEV *It, unsigned K, |
Dan Gohman | 32291b1 | 2009-07-21 00:38:55 +0000 | [diff] [blame] | 1089 | ScalarEvolution &SE, |
Nick Lewycky | 702cf1e | 2011-09-06 06:39:54 +0000 | [diff] [blame] | 1090 | Type *ResultTy) { |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1091 | // Handle the simplest case efficiently. |
| 1092 | if (K == 1) |
| 1093 | return SE.getTruncateOrZeroExtend(It, ResultTy); |
| 1094 | |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1095 | // We are using the following formula for BC(It, K): |
| 1096 | // |
| 1097 | // BC(It, K) = (It * (It - 1) * ... * (It - K + 1)) / K! |
| 1098 | // |
  // Suppose W is the bitwidth of the return value. We must be prepared for
| 1100 | // overflow. Hence, we must assure that the result of our computation is |
| 1101 | // equal to the accurate one modulo 2^W. Unfortunately, division isn't |
| 1102 | // safe in modular arithmetic. |
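  // (For example, modulo 16 the values 12 and 28 are congruent, yet
  // 12 / 4 == 3 and 28 / 4 == 7 are not, so division does not respect
  // congruence classes.)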
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1103 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1104 | // However, this code doesn't use exactly that formula; the formula it uses |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 1105 | // is something like the following, where T is the number of factors of 2 in |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1106 | // K! (i.e. trailing zeros in the binary representation of K!), and ^ is |
| 1107 | // exponentiation: |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1108 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1109 | // BC(It, K) = (It * (It - 1) * ... * (It - K + 1)) / 2^T / (K! / 2^T) |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1110 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1111 | // This formula is trivially equivalent to the previous formula. However, |
| 1112 | // this formula can be implemented much more efficiently. The trick is that |
| 1113 | // K! / 2^T is odd, and exact division by an odd number *is* safe in modular |
| 1114 | // arithmetic. To do exact division in modular arithmetic, all we have |
| 1115 | // to do is multiply by the inverse. Therefore, this step can be done at |
| 1116 | // width W. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 1117 | // |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1118 | // The next issue is how to safely do the division by 2^T. The way this |
| 1119 | // is done is by doing the multiplication step at a width of at least W + T |
| 1120 | // bits. This way, the bottom W+T bits of the product are accurate. Then, |
| 1121 | // when we perform the division by 2^T (which is equivalent to a right shift |
| 1122 | // by T), the bottom W bits are accurate. Extra bits are okay; they'll get |
| 1123 | // truncated out after the division by 2^T. |
| 1124 | // |
| 1125 | // In comparison to just directly using the first formula, this technique |
| 1126 | // is much more efficient; using the first formula requires W * K bits, |
  // but this formula requires less than W + K bits. Also, the first formula
  // requires a division step, whereas this formula only requires multiplies
  // and shifts.
| 1129 | // |
| 1130 | // It doesn't matter whether the subtraction step is done in the calculation |
| 1131 | // width or the input iteration count's width; if the subtraction overflows, |
| 1132 | // the result must be zero anyway. We prefer here to do it in the width of |
| 1133 | // the induction variable because it helps a lot for certain cases; CodeGen |
| 1134 | // isn't smart enough to ignore the overflow, which leads to much less |
| 1135 | // efficient code if the width of the subtraction is wider than the native |
| 1136 | // register width. |
| 1137 | // |
| 1138 | // (It's possible to not widen at all by pulling out factors of 2 before |
| 1139 | // the multiplication; for example, K=2 can be calculated as |
| 1140 | // It/2*(It+(It*INT_MIN/INT_MIN)+-1). However, it requires |
| 1141 | // extra arithmetic, so it's not an obvious win, and it gets |
| 1142 | // much more complicated for K > 3.) |
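  // As a concrete example, for K = 3 we have K! = 6 = 2^1 * 3, so T = 1 and
  // K! / 2^T = 3: the product It*(It-1)*(It-2) is formed at width W+1,
  // shifted right by one, truncated back to W bits, and finally multiplied
  // by the multiplicative inverse of 3 modulo 2^W.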
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1143 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1144 | // Protection from insane SCEVs; this bound is conservative, |
| 1145 | // but it probably doesn't matter. |
| 1146 | if (K > 1000) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 1147 | return SE.getCouldNotCompute(); |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1148 | |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1149 | unsigned W = SE.getTypeSizeInBits(ResultTy); |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1150 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1151 | // Calculate K! / 2^T and T; we divide out the factors of two before |
| 1152 | // multiplying for calculating K! / 2^T to avoid overflow. |
| 1153 | // Other overflow doesn't matter because we only care about the bottom |
| 1154 | // W bits of the result. |
| 1155 | APInt OddFactorial(W, 1); |
| 1156 | unsigned T = 1; |
| 1157 | for (unsigned i = 3; i <= K; ++i) { |
| 1158 | APInt Mult(W, i); |
| 1159 | unsigned TwoFactors = Mult.countTrailingZeros(); |
| 1160 | T += TwoFactors; |
Craig Topper | fc947bc | 2017-04-18 17:14:21 +0000 | [diff] [blame] | 1161 | Mult.lshrInPlace(TwoFactors); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1162 | OddFactorial *= Mult; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1163 | } |
Nick Lewycky | ed169d5 | 2008-06-13 04:38:55 +0000 | [diff] [blame] | 1164 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1165 | // We need at least W + T bits for the multiplication step |
Nick Lewycky | 21add8f | 2009-01-25 08:16:27 +0000 | [diff] [blame] | 1166 | unsigned CalculationBits = W + T; |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1167 | |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 1168 | // Calculate 2^T, at width T+W. |
Benjamin Kramer | fc3ea6f | 2013-07-11 16:05:50 +0000 | [diff] [blame] | 1169 | APInt DivFactor = APInt::getOneBitSet(CalculationBits, T); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1170 | |
| 1171 | // Calculate the multiplicative inverse of K! / 2^T; |
| 1172 | // this multiplication factor will perform the exact division by |
| 1173 | // K! / 2^T. |
| 1174 | APInt Mod = APInt::getSignedMinValue(W+1); |
| 1175 | APInt MultiplyFactor = OddFactorial.zext(W+1); |
| 1176 | MultiplyFactor = MultiplyFactor.multiplicativeInverse(Mod); |
| 1177 | MultiplyFactor = MultiplyFactor.trunc(W); |
| 1178 | |
| 1179 | // Calculate the product, at width T+W |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1180 | IntegerType *CalculationTy = IntegerType::get(SE.getContext(), |
Owen Anderson | 55f1c09 | 2009-08-13 21:58:54 +0000 | [diff] [blame] | 1181 | CalculationBits); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1182 | const SCEV *Dividend = SE.getTruncateOrZeroExtend(It, CalculationTy); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1183 | for (unsigned i = 1; i != K; ++i) { |
Dan Gohman | 1d2ded7 | 2010-05-03 22:09:21 +0000 | [diff] [blame] | 1184 | const SCEV *S = SE.getMinusSCEV(It, SE.getConstant(It->getType(), i)); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1185 | Dividend = SE.getMulExpr(Dividend, |
| 1186 | SE.getTruncateOrZeroExtend(S, CalculationTy)); |
| 1187 | } |
| 1188 | |
| 1189 | // Divide by 2^T |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1190 | const SCEV *DivResult = SE.getUDivExpr(Dividend, SE.getConstant(DivFactor)); |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 1191 | |
| 1192 | // Truncate the result, and divide by K! / 2^T. |
| 1193 | |
| 1194 | return SE.getMulExpr(SE.getConstant(MultiplyFactor), |
| 1195 | SE.getTruncateOrZeroExtend(DivResult, ResultTy)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1196 | } |
| 1197 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 1198 | /// Return the value of this chain of recurrences at the specified iteration |
| 1199 | /// number. We can evaluate this recurrence by multiplying each element in the |
| 1200 | /// chain by the binomial coefficient corresponding to it. In other words, we |
| 1201 | /// can evaluate {A,+,B,+,C,+,D} as: |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1202 | /// |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1203 | /// A*BC(It, 0) + B*BC(It, 1) + C*BC(It, 2) + D*BC(It, 3) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1204 | /// |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1205 | /// where BC(It, k) stands for binomial coefficient. |
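/// For example, {0,+,1,+,1} evaluated at iteration It is
/// 0*BC(It,0) + 1*BC(It,1) + 1*BC(It,2) = It + It*(It-1)/2.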
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1206 | const SCEV *SCEVAddRecExpr::evaluateAtIteration(const SCEV *It, |
Dan Gohman | 32291b1 | 2009-07-21 00:38:55 +0000 | [diff] [blame] | 1207 | ScalarEvolution &SE) const { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1208 | const SCEV *Result = getStart(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1209 | for (unsigned i = 1, e = getNumOperands(); i != e; ++i) { |
Wojciech Matyjewicz | d2d9764 | 2008-02-11 11:03:14 +0000 | [diff] [blame] | 1210 | // The computation is correct in the face of overflow provided that the |
| 1211 | // multiplication is performed _after_ the evaluation of the binomial |
| 1212 | // coefficient. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1213 | const SCEV *Coeff = BinomialCoefficient(It, i, SE, getType()); |
Nick Lewycky | 707663e | 2008-10-13 03:58:02 +0000 | [diff] [blame] | 1214 | if (isa<SCEVCouldNotCompute>(Coeff)) |
| 1215 | return Coeff; |
| 1216 | |
| 1217 | Result = SE.getAddExpr(Result, SE.getMulExpr(getOperand(i), Coeff)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1218 | } |
| 1219 | return Result; |
| 1220 | } |
| 1221 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1222 | //===----------------------------------------------------------------------===// |
| 1223 | // SCEV Expression folder implementations |
| 1224 | //===----------------------------------------------------------------------===// |
| 1225 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1226 | const SCEV *ScalarEvolution::getTruncateExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1227 | Type *Ty) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1228 | assert(getTypeSizeInBits(Op->getType()) > getTypeSizeInBits(Ty) && |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1229 | "This is not a truncating conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1230 | assert(isSCEVable(Ty) && |
| 1231 | "This is not a conversion to a SCEVable type!"); |
| 1232 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1233 | |
Dan Gohman | 3a302cb | 2009-07-13 20:50:19 +0000 | [diff] [blame] | 1234 | FoldingSetNodeID ID; |
| 1235 | ID.AddInteger(scTruncate); |
| 1236 | ID.AddPointer(Op); |
| 1237 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1238 | void *IP = nullptr; |
Dan Gohman | 3a302cb | 2009-07-13 20:50:19 +0000 | [diff] [blame] | 1239 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
| 1240 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1241 | // Fold if the operand is constant. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1242 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
Dan Gohman | 8d7576e | 2009-06-24 00:38:39 +0000 | [diff] [blame] | 1243 | return getConstant( |
Nuno Lopes | ab5c924 | 2012-05-15 15:44:38 +0000 | [diff] [blame] | 1244 | cast<ConstantInt>(ConstantExpr::getTrunc(SC->getValue(), Ty))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1245 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1246 | // trunc(trunc(x)) --> trunc(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1247 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1248 | return getTruncateExpr(ST->getOperand(), Ty); |
| 1249 | |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1250 | // trunc(sext(x)) --> sext(x) if widening or trunc(x) if narrowing |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1251 | if (const SCEVSignExtendExpr *SS = dyn_cast<SCEVSignExtendExpr>(Op)) |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1252 | return getTruncateOrSignExtend(SS->getOperand(), Ty); |
| 1253 | |
| 1254 | // trunc(zext(x)) --> zext(x) if widening or trunc(x) if narrowing |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1255 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
Nick Lewycky | b4d9f7a | 2009-04-23 05:15:08 +0000 | [diff] [blame] | 1256 | return getTruncateOrZeroExtend(SZ->getOperand(), Ty); |
| 1257 | |
  // trunc(x1+x2+...+xN) --> trunc(x1)+trunc(x2)+...+trunc(xN) if we can either
  // eliminate all of the truncates or replace the other casts with truncates.
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1260 | if (const SCEVAddExpr *SA = dyn_cast<SCEVAddExpr>(Op)) { |
| 1261 | SmallVector<const SCEV *, 4> Operands; |
| 1262 | bool hasTrunc = false; |
| 1263 | for (unsigned i = 0, e = SA->getNumOperands(); i != e && !hasTrunc; ++i) { |
| 1264 | const SCEV *S = getTruncateExpr(SA->getOperand(i), Ty); |
Nick Lewycky | be8af48 | 2015-03-20 02:25:00 +0000 | [diff] [blame] | 1265 | if (!isa<SCEVCastExpr>(SA->getOperand(i))) |
| 1266 | hasTrunc = isa<SCEVTruncateExpr>(S); |
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1267 | Operands.push_back(S); |
| 1268 | } |
| 1269 | if (!hasTrunc) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1270 | return getAddExpr(Operands); |
    // Although we checked at the beginning that ID is not in the cache, it is
    // possible that the recursive calls above added it to the cache; if we
    // find it now, just return it.
| 1274 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) |
| 1275 | return S; |
Nick Lewycky | 5143f0f | 2011-01-19 16:59:46 +0000 | [diff] [blame] | 1276 | } |
| 1277 | |
  // trunc(x1*x2*...*xN) --> trunc(x1)*trunc(x2)*...*trunc(xN) if we can either
  // eliminate all of the truncates or replace the other casts with truncates.
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1280 | if (const SCEVMulExpr *SM = dyn_cast<SCEVMulExpr>(Op)) { |
| 1281 | SmallVector<const SCEV *, 4> Operands; |
| 1282 | bool hasTrunc = false; |
| 1283 | for (unsigned i = 0, e = SM->getNumOperands(); i != e && !hasTrunc; ++i) { |
| 1284 | const SCEV *S = getTruncateExpr(SM->getOperand(i), Ty); |
Nick Lewycky | be8af48 | 2015-03-20 02:25:00 +0000 | [diff] [blame] | 1285 | if (!isa<SCEVCastExpr>(SM->getOperand(i))) |
| 1286 | hasTrunc = isa<SCEVTruncateExpr>(S); |
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1287 | Operands.push_back(S); |
| 1288 | } |
| 1289 | if (!hasTrunc) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1290 | return getMulExpr(Operands); |
    // Although we checked at the beginning that ID is not in the cache, it is
    // possible that the recursive calls above added it to the cache; if we
    // find it now, just return it.
| 1294 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) |
| 1295 | return S; |
Nick Lewycky | 5c901f3 | 2011-01-19 18:56:00 +0000 | [diff] [blame] | 1296 | } |
| 1297 | |
Dan Gohman | 5a728c9 | 2009-06-18 16:24:47 +0000 | [diff] [blame] | 1298 | // If the input value is a chrec scev, truncate the chrec's operands. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1299 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(Op)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1300 | SmallVector<const SCEV *, 4> Operands; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 1301 | for (const SCEV *Op : AddRec->operands()) |
| 1302 | Operands.push_back(getTruncateExpr(Op, Ty)); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 1303 | return getAddRecExpr(Operands, AddRec->getLoop(), SCEV::FlagAnyWrap); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1304 | } |
| 1305 | |
Dan Gohman | 89dd42a | 2010-06-25 18:47:08 +0000 | [diff] [blame] | 1306 | // The cast wasn't folded; create an explicit cast node. We can reuse |
| 1307 | // the existing insert position since if we get here, we won't have |
| 1308 | // made any changes which would invalidate it. |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 1309 | SCEV *S = new (SCEVAllocator) SCEVTruncateExpr(ID.Intern(SCEVAllocator), |
| 1310 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1311 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 1312 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1313 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1314 | } |
| 1315 | |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1316 | // Get the limit of a recurrence such that incrementing by Step cannot cause |
| 1317 | // signed overflow as long as the value of the recurrence within the |
| 1318 | // loop does not exceed this limit before incrementing. |
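// For example, for an i8 recurrence whose Step is known to lie in [1, 3],
// the limit is SignedMin - 3, which wraps around to 125, with the predicate
// ICMP_SLT: any value slt 125 can be incremented by the step without signed
// overflow.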
| 1319 | static const SCEV *getSignedOverflowLimitForStep(const SCEV *Step, |
| 1320 | ICmpInst::Predicate *Pred, |
| 1321 | ScalarEvolution *SE) { |
| 1322 | unsigned BitWidth = SE->getTypeSizeInBits(Step->getType()); |
| 1323 | if (SE->isKnownPositive(Step)) { |
| 1324 | *Pred = ICmpInst::ICMP_SLT; |
| 1325 | return SE->getConstant(APInt::getSignedMinValue(BitWidth) - |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 1326 | SE->getSignedRangeMax(Step)); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1327 | } |
| 1328 | if (SE->isKnownNegative(Step)) { |
| 1329 | *Pred = ICmpInst::ICMP_SGT; |
| 1330 | return SE->getConstant(APInt::getSignedMaxValue(BitWidth) - |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 1331 | SE->getSignedRangeMin(Step)); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1332 | } |
| 1333 | return nullptr; |
| 1334 | } |
| 1335 | |
// Get the limit of a recurrence such that incrementing by Step cannot cause
// unsigned overflow as long as the value of the recurrence within the loop
// does not exceed this limit before incrementing.
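// For example, for an i8 recurrence whose Step is known to lie in [1, 3],
// the limit is 0 - 3, which wraps around to 253, with the predicate
// ICMP_ULT: any value ult 253 can be incremented by the step without
// unsigned overflow.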
| 1339 | static const SCEV *getUnsignedOverflowLimitForStep(const SCEV *Step, |
| 1340 | ICmpInst::Predicate *Pred, |
| 1341 | ScalarEvolution *SE) { |
| 1342 | unsigned BitWidth = SE->getTypeSizeInBits(Step->getType()); |
| 1343 | *Pred = ICmpInst::ICMP_ULT; |
| 1344 | |
| 1345 | return SE->getConstant(APInt::getMinValue(BitWidth) - |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 1346 | SE->getUnsignedRangeMax(Step)); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1347 | } |
| 1348 | |
| 1349 | namespace { |
| 1350 | |
| 1351 | struct ExtendOpTraitsBase { |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1352 | typedef const SCEV *(ScalarEvolution::*GetExtendExprTy)(const SCEV *, Type *, |
| 1353 | unsigned); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1354 | }; |
| 1355 | |
| 1356 | // Used to make code generic over signed and unsigned overflow. |
| 1357 | template <typename ExtendOp> struct ExtendOpTraits { |
| 1358 | // Members present: |
| 1359 | // |
| 1360 | // static const SCEV::NoWrapFlags WrapType; |
| 1361 | // |
| 1362 | // static const ExtendOpTraitsBase::GetExtendExprTy GetExtendExpr; |
| 1363 | // |
| 1364 | // static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1365 | // ICmpInst::Predicate *Pred, |
| 1366 | // ScalarEvolution *SE); |
| 1367 | }; |
| 1368 | |
| 1369 | template <> |
| 1370 | struct ExtendOpTraits<SCEVSignExtendExpr> : public ExtendOpTraitsBase { |
| 1371 | static const SCEV::NoWrapFlags WrapType = SCEV::FlagNSW; |
| 1372 | |
| 1373 | static const GetExtendExprTy GetExtendExpr; |
| 1374 | |
| 1375 | static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1376 | ICmpInst::Predicate *Pred, |
| 1377 | ScalarEvolution *SE) { |
| 1378 | return getSignedOverflowLimitForStep(Step, Pred, SE); |
| 1379 | } |
| 1380 | }; |
| 1381 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1382 | const ExtendOpTraitsBase::GetExtendExprTy ExtendOpTraits< |
| 1383 | SCEVSignExtendExpr>::GetExtendExpr = &ScalarEvolution::getSignExtendExpr; |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1384 | |
| 1385 | template <> |
| 1386 | struct ExtendOpTraits<SCEVZeroExtendExpr> : public ExtendOpTraitsBase { |
| 1387 | static const SCEV::NoWrapFlags WrapType = SCEV::FlagNUW; |
| 1388 | |
| 1389 | static const GetExtendExprTy GetExtendExpr; |
| 1390 | |
| 1391 | static const SCEV *getOverflowLimitForStep(const SCEV *Step, |
| 1392 | ICmpInst::Predicate *Pred, |
| 1393 | ScalarEvolution *SE) { |
| 1394 | return getUnsignedOverflowLimitForStep(Step, Pred, SE); |
| 1395 | } |
| 1396 | }; |
| 1397 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1398 | const ExtendOpTraitsBase::GetExtendExprTy ExtendOpTraits< |
| 1399 | SCEVZeroExtendExpr>::GetExtendExpr = &ScalarEvolution::getZeroExtendExpr; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 1400 | |
| 1401 | } // end anonymous namespace |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1402 | |
| 1403 | // The recurrence AR has been shown to have no signed/unsigned wrap or something |
| 1404 | // close to it. Typically, if we can prove NSW/NUW for AR, then we can just as |
| 1405 | // easily prove NSW/NUW for its preincrement or postincrement sibling. This |
| 1406 | // allows normalizing a sign/zero extended AddRec as such: {sext/zext(Step + |
| 1407 | // Start),+,Step} => {(Step + sext/zext(Start),+,Step} As a result, the |
| 1408 | // expression "Step + sext/zext(PreIncAR)" is congruent with |
| 1409 | // "sext/zext(PostIncAR)" |
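// Roughly: if AR = {(S + X),+,X}, {S,+,X} is known not to wrap, and the loop
// body executes at least once, then sext/zext(S + X) can be computed as
// sext/zext(X) + sext/zext(S); getPreStartForExtend below looks for such an S.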
| 1410 | template <typename ExtendOpTy> |
| 1411 | static const SCEV *getPreStartForExtend(const SCEVAddRecExpr *AR, Type *Ty, |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1412 | ScalarEvolution *SE, unsigned Depth) { |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1413 | auto WrapType = ExtendOpTraits<ExtendOpTy>::WrapType; |
| 1414 | auto GetExtendExpr = ExtendOpTraits<ExtendOpTy>::GetExtendExpr; |
| 1415 | |
| 1416 | const Loop *L = AR->getLoop(); |
| 1417 | const SCEV *Start = AR->getStart(); |
| 1418 | const SCEV *Step = AR->getStepRecurrence(*SE); |
| 1419 | |
| 1420 | // Check for a simple looking step prior to loop entry. |
| 1421 | const SCEVAddExpr *SA = dyn_cast<SCEVAddExpr>(Start); |
| 1422 | if (!SA) |
| 1423 | return nullptr; |
| 1424 | |
| 1425 | // Create an AddExpr for "PreStart" after subtracting Step. Full SCEV |
| 1426 | // subtraction is expensive. For this purpose, perform a quick and dirty |
| 1427 | // difference, by checking for Step in the operand list. |
| 1428 | SmallVector<const SCEV *, 4> DiffOps; |
| 1429 | for (const SCEV *Op : SA->operands()) |
| 1430 | if (Op != Step) |
| 1431 | DiffOps.push_back(Op); |
| 1432 | |
| 1433 | if (DiffOps.size() == SA->getNumOperands()) |
| 1434 | return nullptr; |
| 1435 | |
| 1436 | // Try to prove `WrapType` (SCEV::FlagNSW or SCEV::FlagNUW) on `PreStart` + |
| 1437 | // `Step`: |
| 1438 | |
| 1439 | // 1. NSW/NUW flags on the step increment. |
Sanjoy Das | 0714e3e | 2015-10-23 06:33:47 +0000 | [diff] [blame] | 1440 | auto PreStartFlags = |
| 1441 | ScalarEvolution::maskFlags(SA->getNoWrapFlags(), SCEV::FlagNUW); |
| 1442 | const SCEV *PreStart = SE->getAddExpr(DiffOps, PreStartFlags); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1443 | const SCEVAddRecExpr *PreAR = dyn_cast<SCEVAddRecExpr>( |
| 1444 | SE->getAddRecExpr(PreStart, Step, L, SCEV::FlagAnyWrap)); |
| 1445 | |
Sanjoy Das | b14010d | 2015-02-24 01:02:42 +0000 | [diff] [blame] | 1446 | // "{S,+,X} is <nsw>/<nuw>" and "the backedge is taken at least once" implies |
| 1447 | // "S+X does not sign/unsign-overflow". |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1448 | // |
| 1449 | |
Sanjoy Das | b14010d | 2015-02-24 01:02:42 +0000 | [diff] [blame] | 1450 | const SCEV *BECount = SE->getBackedgeTakenCount(L); |
| 1451 | if (PreAR && PreAR->getNoWrapFlags(WrapType) && |
| 1452 | !isa<SCEVCouldNotCompute>(BECount) && SE->isKnownPositive(BECount)) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1453 | return PreStart; |
| 1454 | |
| 1455 | // 2. Direct overflow check on the step operation's expression. |
| 1456 | unsigned BitWidth = SE->getTypeSizeInBits(AR->getType()); |
| 1457 | Type *WideTy = IntegerType::get(SE->getContext(), BitWidth * 2); |
| 1458 | const SCEV *OperandExtendedStart = |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1459 | SE->getAddExpr((SE->*GetExtendExpr)(PreStart, WideTy, Depth), |
| 1460 | (SE->*GetExtendExpr)(Step, WideTy, Depth)); |
| 1461 | if ((SE->*GetExtendExpr)(Start, WideTy, Depth) == OperandExtendedStart) { |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1462 | if (PreAR && AR->getNoWrapFlags(WrapType)) { |
| 1463 | // If we know `AR` == {`PreStart`+`Step`,+,`Step`} is `WrapType` (FlagNSW |
| 1464 | // or FlagNUW) and that `PreStart` + `Step` is `WrapType` too, then |
| 1465 | // `PreAR` == {`PreStart`,+,`Step`} is also `WrapType`. Cache this fact. |
| 1466 | const_cast<SCEVAddRecExpr *>(PreAR)->setNoWrapFlags(WrapType); |
| 1467 | } |
| 1468 | return PreStart; |
| 1469 | } |
| 1470 | |
| 1471 | // 3. Loop precondition. |
| 1472 | ICmpInst::Predicate Pred; |
| 1473 | const SCEV *OverflowLimit = |
| 1474 | ExtendOpTraits<ExtendOpTy>::getOverflowLimitForStep(Step, &Pred, SE); |
| 1475 | |
| 1476 | if (OverflowLimit && |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 1477 | SE->isLoopEntryGuardedByCond(L, Pred, PreStart, OverflowLimit)) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1478 | return PreStart; |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 1479 | |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1480 | return nullptr; |
| 1481 | } |
| 1482 | |
| 1483 | // Get the normalized zero or sign extended expression for this AddRec's Start. |
| 1484 | template <typename ExtendOpTy> |
| 1485 | static const SCEV *getExtendAddRecStart(const SCEVAddRecExpr *AR, Type *Ty, |
Wei Mi | 8c40533 | 2017-04-17 20:40:05 +0000 | [diff] [blame] | 1486 | ScalarEvolution *SE, |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1487 | unsigned Depth) { |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1488 | auto GetExtendExpr = ExtendOpTraits<ExtendOpTy>::GetExtendExpr; |
| 1489 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1490 | const SCEV *PreStart = getPreStartForExtend<ExtendOpTy>(AR, Ty, SE, Depth); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1491 | if (!PreStart) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1492 | return (SE->*GetExtendExpr)(AR->getStart(), Ty, Depth); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1493 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1494 | return SE->getAddExpr((SE->*GetExtendExpr)(AR->getStepRecurrence(*SE), Ty, |
| 1495 | Depth), |
| 1496 | (SE->*GetExtendExpr)(PreStart, Ty, Depth)); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1497 | } |
| 1498 | |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1499 | // Try to prove away overflow by looking at "nearby" add recurrences. A |
| 1500 | // motivating example for this rule: if we know `{0,+,4}` is `ult` `-1` and it |
| 1501 | // does not itself wrap then we can conclude that `{1,+,4}` is `nuw`. |
| 1502 | // |
| 1503 | // Formally: |
| 1504 | // |
| 1505 | // {S,+,X} == {S-T,+,X} + T |
| 1506 | // => Ext({S,+,X}) == Ext({S-T,+,X} + T) |
| 1507 | // |
| 1508 | // If ({S-T,+,X} + T) does not overflow ... (1) |
| 1509 | // |
| 1510 | // RHS == Ext({S-T,+,X} + T) == Ext({S-T,+,X}) + Ext(T) |
| 1511 | // |
| 1512 | // If {S-T,+,X} does not overflow ... (2) |
| 1513 | // |
| 1514 | // RHS == Ext({S-T,+,X}) + Ext(T) == {Ext(S-T),+,Ext(X)} + Ext(T) |
| 1515 | // == {Ext(S-T)+Ext(T),+,Ext(X)} |
| 1516 | // |
| 1517 | // If (S-T)+T does not overflow ... (3) |
| 1518 | // |
| 1519 | // RHS == {Ext(S-T)+Ext(T),+,Ext(X)} == {Ext(S-T+T),+,Ext(X)} |
| 1520 | // == {Ext(S),+,Ext(X)} == LHS |
| 1521 | // |
| 1522 | // Thus, if (1), (2) and (3) are true for some T, then |
| 1523 | // Ext({S,+,X}) == {Ext(S),+,Ext(X)} |
| 1524 | // |
| 1525 | // (3) is implied by (1) -- "(S-T)+T does not overflow" is simply "({S-T,+,X}+T) |
| 1526 | // does not overflow" restricted to the 0th iteration. Therefore we only need |
| 1527 | // to check for (1) and (2). |
| 1528 | // |
| 1529 | // In the current context, S is `Start`, X is `Step`, Ext is `ExtendOpTy` and T |
| 1530 | // is `Delta` (defined below). |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1531 | template <typename ExtendOpTy> |
| 1532 | bool ScalarEvolution::proveNoWrapByVaryingStart(const SCEV *Start, |
| 1533 | const SCEV *Step, |
| 1534 | const Loop *L) { |
| 1535 | auto WrapType = ExtendOpTraits<ExtendOpTy>::WrapType; |
| 1536 | |
| 1537 | // We restrict `Start` to a constant to prevent SCEV from spending too much |
| 1538 | // time here. It is correct (but more expensive) to continue with a |
| 1539 | // non-constant `Start` and do a general SCEV subtraction to compute |
| 1540 | // `PreStart` below. |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1541 | const SCEVConstant *StartC = dyn_cast<SCEVConstant>(Start); |
| 1542 | if (!StartC) |
| 1543 | return false; |
| 1544 | |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 1545 | APInt StartAI = StartC->getAPInt(); |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1546 | |
| 1547 | for (unsigned Delta : {-2, -1, 1, 2}) { |
| 1548 | const SCEV *PreStart = getConstant(StartAI - Delta); |
| 1549 | |
Sanjoy Das | 4280110 | 2015-10-23 06:57:21 +0000 | [diff] [blame] | 1550 | FoldingSetNodeID ID; |
| 1551 | ID.AddInteger(scAddRecExpr); |
| 1552 | ID.AddPointer(PreStart); |
| 1553 | ID.AddPointer(Step); |
| 1554 | ID.AddPointer(L); |
| 1555 | void *IP = nullptr; |
| 1556 | const auto *PreAR = |
| 1557 | static_cast<SCEVAddRecExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 1558 | |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1559 | // Give up if we don't already have the add recurrence we need because |
| 1560 | // actually constructing an add recurrence is relatively expensive. |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1561 | if (PreAR && PreAR->getNoWrapFlags(WrapType)) { // proves (2) |
| 1562 | const SCEV *DeltaS = getConstant(StartC->getType(), Delta); |
| 1563 | ICmpInst::Predicate Pred = ICmpInst::BAD_ICMP_PREDICATE; |
| 1564 | const SCEV *Limit = ExtendOpTraits<ExtendOpTy>::getOverflowLimitForStep( |
| 1565 | DeltaS, &Pred, this); |
| 1566 | if (Limit && isKnownPredicate(Pred, PreAR, Limit)) // proves (1) |
| 1567 | return true; |
| 1568 | } |
| 1569 | } |
| 1570 | |
| 1571 | return false; |
| 1572 | } |
| 1573 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1574 | const SCEV * |
| 1575 | ScalarEvolution::getZeroExtendExpr(const SCEV *Op, Type *Ty, unsigned Depth) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1576 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
Dan Gohman | c1c2ba7 | 2009-04-16 19:25:55 +0000 | [diff] [blame] | 1577 | "This is not an extending conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1578 | assert(isSCEVable(Ty) && |
| 1579 | "This is not a conversion to a SCEVable type!"); |
| 1580 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | c1c2ba7 | 2009-04-16 19:25:55 +0000 | [diff] [blame] | 1581 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1582 | // Fold if the operand is constant. |
Dan Gohman | 5235cc2 | 2010-06-24 16:47:03 +0000 | [diff] [blame] | 1583 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
| 1584 | return getConstant( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1585 | cast<ConstantInt>(ConstantExpr::getZExt(SC->getValue(), Ty))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1586 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1587 | // zext(zext(x)) --> zext(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1588 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1589 | return getZeroExtendExpr(SZ->getOperand(), Ty, Depth + 1); |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1590 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1591 | // Before doing any expensive analysis, check to see if we've already |
| 1592 | // computed a SCEV for this Op and Ty. |
| 1593 | FoldingSetNodeID ID; |
| 1594 | ID.AddInteger(scZeroExtend); |
| 1595 | ID.AddPointer(Op); |
| 1596 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1597 | void *IP = nullptr; |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1598 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1599 | if (Depth > MaxExtDepth) { |
| 1600 | SCEV *S = new (SCEVAllocator) SCEVZeroExtendExpr(ID.Intern(SCEVAllocator), |
| 1601 | Op, Ty); |
| 1602 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 1603 | addToLoopUseLists(S); |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1604 | return S; |
| 1605 | } |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1606 | |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1607 | // zext(trunc(x)) --> zext(x) or x or trunc(x) |
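  // For example, zext i8 (trunc i32 %x to i8) to i32 folds back to %x when
  // the unsigned range of %x shows its top 24 bits are already zero.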
| 1608 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) { |
| 1609 | // It's possible the bits taken off by the truncate were all zero bits. If |
| 1610 | // so, we should be able to simplify this further. |
| 1611 | const SCEV *X = ST->getOperand(); |
| 1612 | ConstantRange CR = getUnsignedRange(X); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1613 | unsigned TruncBits = getTypeSizeInBits(ST->getType()); |
| 1614 | unsigned NewBits = getTypeSizeInBits(Ty); |
| 1615 | if (CR.truncate(TruncBits).zeroExtend(NewBits).contains( |
Nick Lewycky | d4192f7 | 2011-01-23 20:06:05 +0000 | [diff] [blame] | 1616 | CR.zextOrTrunc(NewBits))) |
| 1617 | return getTruncateOrZeroExtend(X, Ty); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1618 | } |
| 1619 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1620 | // If the input value is a chrec scev, and we can prove that the value |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1621 | // did not overflow the old, smaller, value, we can zero extend all of the |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1622 | // operands (often constants). This allows analysis of something like |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1623 | // this: for (unsigned char X = 0; X < 100; ++X) { int Y = X; } |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1624 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1625 | if (AR->isAffine()) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1626 | const SCEV *Start = AR->getStart(); |
| 1627 | const SCEV *Step = AR->getStepRecurrence(*this); |
| 1628 | unsigned BitWidth = getTypeSizeInBits(AR->getType()); |
| 1629 | const Loop *L = AR->getLoop(); |
| 1630 | |
Sanjoy Das | 724f5cf | 2016-03-03 18:31:29 +0000 | [diff] [blame] | 1631 | if (!AR->hasNoUnsignedWrap()) { |
| 1632 | auto NewFlags = proveNoWrapViaConstantRanges(AR); |
| 1633 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(NewFlags); |
| 1634 | } |
| 1635 | |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1636 | // If we have special knowledge that this addrec won't overflow, |
| 1637 | // we don't need to do any further analysis. |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 1638 | if (AR->hasNoUnsignedWrap()) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1639 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1640 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, Depth + 1), |
| 1641 | getZeroExtendExpr(Step, Ty, Depth + 1), L, AR->getNoWrapFlags()); |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1642 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1643 | // Check whether the backedge-taken count is SCEVCouldNotCompute. |
| 1644 | // Note that this serves two purposes: It filters out loops that are |
| 1645 | // simply not analyzable, and it covers the case where this code is |
| 1646 | // being called from within backedge-taken count analysis, such that |
| 1647 | // attempting to ask for the backedge-taken count would likely result |
      // in infinite recursion. In the latter case, the analysis code will
| 1649 | // cope with a conservative value, and it will take care to purge |
| 1650 | // that value once it has finished. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1651 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(L); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1652 | if (!isa<SCEVCouldNotCompute>(MaxBECount)) { |
Dan Gohman | 95c5b0e | 2009-04-29 01:54:20 +0000 | [diff] [blame] | 1653 | // Manually compute the final value for AR, checking for |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1654 | // overflow. |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1655 | |
        // Check whether the backedge-taken count can be losslessly cast to
        // the addrec's type. The count is always unsigned.
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1658 | const SCEV *CastedMaxBECount = |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1659 | getTruncateOrZeroExtend(MaxBECount, Start->getType()); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1660 | const SCEV *RecastedMaxBECount = |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1661 | getTruncateOrZeroExtend(CastedMaxBECount, MaxBECount->getType()); |
| 1662 | if (MaxBECount == RecastedMaxBECount) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1663 | Type *WideTy = IntegerType::get(getContext(), BitWidth * 2); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1664 | // Check whether Start+Step*MaxBECount has no unsigned overflow. |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1665 | const SCEV *ZMul = getMulExpr(CastedMaxBECount, Step, |
| 1666 | SCEV::FlagAnyWrap, Depth + 1); |
| 1667 | const SCEV *ZAdd = getZeroExtendExpr(getAddExpr(Start, ZMul, |
| 1668 | SCEV::FlagAnyWrap, |
| 1669 | Depth + 1), |
| 1670 | WideTy, Depth + 1); |
| 1671 | const SCEV *WideStart = getZeroExtendExpr(Start, WideTy, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1672 | const SCEV *WideMaxBECount = |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1673 | getZeroExtendExpr(CastedMaxBECount, WideTy, Depth + 1); |
| 1674 | const SCEV *OperandExtendedAdd = |
| 1675 | getAddExpr(WideStart, |
| 1676 | getMulExpr(WideMaxBECount, |
| 1677 | getZeroExtendExpr(Step, WideTy, Depth + 1), |
| 1678 | SCEV::FlagAnyWrap, Depth + 1), |
| 1679 | SCEV::FlagAnyWrap, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1680 | if (ZAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1681 | // Cache knowledge of AR NUW, which is propagated to this AddRec. |
| 1682 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1683 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1684 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1685 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, |
| 1686 | Depth + 1), |
| 1687 | getZeroExtendExpr(Step, Ty, Depth + 1), L, |
Wei Mi | 8c40533 | 2017-04-17 20:40:05 +0000 | [diff] [blame] | 1688 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1689 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1690 | // Similar to above, only this time treat the step value as signed. |
| 1691 | // This covers loops that count down. |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1692 | OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1693 | getAddExpr(WideStart, |
| 1694 | getMulExpr(WideMaxBECount, |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1695 | getSignExtendExpr(Step, WideTy, Depth + 1), |
| 1696 | SCEV::FlagAnyWrap, Depth + 1), |
| 1697 | SCEV::FlagAnyWrap, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1698 | if (ZAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1699 | // Cache knowledge of AR NW, which is propagated to this AddRec. |
| 1700 | // Negative step causes unsigned wrap, but it still can't self-wrap. |
| 1701 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1702 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1703 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1704 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, |
| 1705 | Depth + 1), |
| 1706 | getSignExtendExpr(Step, Ty, Depth + 1), L, |
| 1707 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1708 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1709 | } |
Sanjoy Das | f5d40d5 | 2016-05-17 17:51:14 +0000 | [diff] [blame] | 1710 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1711 | |
Sanjoy Das | f5d40d5 | 2016-05-17 17:51:14 +0000 | [diff] [blame] | 1712 | // Normally, in the cases we can prove no-overflow via a |
| 1713 | // backedge guarding condition, we can also compute a backedge |
| 1714 | // taken count for the loop. The exceptions are assumptions and |
| 1715 | // guards present in the loop -- SCEV is not great at exploiting |
| 1716 | // these to compute max backedge taken counts, but can still use |
| 1717 | // these to prove lack of overflow. Use this fact to avoid |
| 1718 | // doing extra work that may not pay off. |
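          // ("Assumptions" are llvm.assume calls tracked by the AssumptionCache
          // AC; "guards" refer to llvm.experimental.guard calls, whose presence
          // is recorded in HasGuards.)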
| 1719 | if (!isa<SCEVCouldNotCompute>(MaxBECount) || HasGuards || |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1720 | !AC.assumptions().empty()) { |
Sanjoy Das | f5d40d5 | 2016-05-17 17:51:14 +0000 | [diff] [blame] | 1721 | // If the backedge is guarded by a comparison with the pre-inc |
| 1722 | // value the addrec is safe. Also, if the entry is guarded by |
| 1723 | // a comparison with the start value and the backedge is |
| 1724 | // guarded by a comparison with the post-inc value, the addrec |
| 1725 | // is safe. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1726 | if (isKnownPositive(Step)) { |
| 1727 | const SCEV *N = getConstant(APInt::getMinValue(BitWidth) - |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 1728 | getUnsignedRangeMax(Step)); |
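            // N is -umax(Step), i.e. 2^BitWidth - umax(Step): as long as the
            // pre-increment value stays unsigned-below N, adding Step cannot
            // wrap around the top of the unsigned range.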
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1729 | if (isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_ULT, AR, N) || |
Dan Gohman | b50349a | 2010-04-11 19:27:13 +0000 | [diff] [blame] | 1730 | (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_ULT, Start, N) && |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1731 | isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_ULT, |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1732 | AR->getPostIncExpr(*this), N))) { |
Sanjoy Das | f5d40d5 | 2016-05-17 17:51:14 +0000 | [diff] [blame] | 1733 | // Cache knowledge of AR NUW, which is propagated to this |
| 1734 | // AddRec. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1735 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1736 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1737 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1738 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, |
| 1739 | Depth + 1), |
| 1740 | getZeroExtendExpr(Step, Ty, Depth + 1), L, |
Wei Mi | 8c40533 | 2017-04-17 20:40:05 +0000 | [diff] [blame] | 1741 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1742 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1743 | } else if (isKnownNegative(Step)) { |
| 1744 | const SCEV *N = getConstant(APInt::getMaxValue(BitWidth) - |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 1745 | getSignedRangeMin(Step)); |
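            // Here N is (the largest possible magnitude of Step) - 1: as long
            // as the pre-increment value stays unsigned-above N, adding the
            // negative Step cannot wrap below zero.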
Dan Gohman | 5f18c54 | 2010-05-04 01:11:15 +0000 | [diff] [blame] | 1746 | if (isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_UGT, AR, N) || |
| 1747 | (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_UGT, Start, N) && |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1748 | isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_UGT, |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1749 | AR->getPostIncExpr(*this), N))) { |
Sanjoy Das | f5d40d5 | 2016-05-17 17:51:14 +0000 | [diff] [blame] | 1750 | // Cache knowledge of AR NW, which is propagated to this |
| 1751 | // AddRec. Negative step causes unsigned wrap, but it |
| 1752 | // still can't self-wrap. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1753 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
| 1754 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1755 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1756 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, |
| 1757 | Depth + 1), |
| 1758 | getSignExtendExpr(Step, Ty, Depth + 1), L, |
| 1759 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1760 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1761 | } |
| 1762 | } |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1763 | |
| 1764 | if (proveNoWrapByVaryingStart<SCEVZeroExtendExpr>(Start, Step, L)) { |
| 1765 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW); |
| 1766 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1767 | getExtendAddRecStart<SCEVZeroExtendExpr>(AR, Ty, this, Depth + 1), |
| 1768 | getZeroExtendExpr(Step, Ty, Depth + 1), L, AR->getNoWrapFlags()); |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 1769 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1770 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1771 | |
Sanjoy Das | eeca9f6 | 2015-10-22 19:57:38 +0000 | [diff] [blame] | 1772 | if (auto *SA = dyn_cast<SCEVAddExpr>(Op)) { |
| 1773 | // zext((A + B + ...)<nuw>) --> (zext(A) + zext(B) + ...)<nuw> |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 1774 | if (SA->hasNoUnsignedWrap()) { |
Sanjoy Das | eeca9f6 | 2015-10-22 19:57:38 +0000 | [diff] [blame] | 1775 |       // If the addition does not overflow unsigned then we can, by definition,
| 1776 | // commute the zero extension with the addition operation. |
| 1777 | SmallVector<const SCEV *, 4> Ops; |
| 1778 | for (const auto *Op : SA->operands()) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1779 | Ops.push_back(getZeroExtendExpr(Op, Ty, Depth + 1)); |
| 1780 | return getAddExpr(Ops, SCEV::FlagNUW, Depth + 1); |
Sanjoy Das | eeca9f6 | 2015-10-22 19:57:38 +0000 | [diff] [blame] | 1781 | } |
| 1782 | } |
| 1783 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1784 | // The cast wasn't folded; create an explicit cast node. |
| 1785 | // Recompute the insert position, as it may have been invalidated. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1786 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 1787 | SCEV *S = new (SCEVAllocator) SCEVZeroExtendExpr(ID.Intern(SCEVAllocator), |
| 1788 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1789 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 1790 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 1791 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 1792 | } |
| 1793 | |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1794 | const SCEV * |
| 1795 | ScalarEvolution::getSignExtendExpr(const SCEV *Op, Type *Ty, unsigned Depth) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 1796 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1797 | "This is not an extending conversion!"); |
Dan Gohman | 194e42c | 2009-05-01 16:44:18 +0000 | [diff] [blame] | 1798 | assert(isSCEVable(Ty) && |
| 1799 | "This is not a conversion to a SCEVable type!"); |
| 1800 | Ty = getEffectiveSCEVType(Ty); |
Dan Gohman | 413e91f | 2009-04-21 00:55:22 +0000 | [diff] [blame] | 1801 | |
Dan Gohman | 3423e72 | 2009-06-30 20:13:32 +0000 | [diff] [blame] | 1802 | // Fold if the operand is constant. |
Dan Gohman | 5235cc2 | 2010-06-24 16:47:03 +0000 | [diff] [blame] | 1803 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
| 1804 | return getConstant( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1805 | cast<ConstantInt>(ConstantExpr::getSExt(SC->getValue(), Ty))); |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1806 | |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1807 | // sext(sext(x)) --> sext(x) |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1808 | if (const SCEVSignExtendExpr *SS = dyn_cast<SCEVSignExtendExpr>(Op)) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1809 | return getSignExtendExpr(SS->getOperand(), Ty, Depth + 1); |
Dan Gohman | 79af854 | 2009-04-22 16:20:48 +0000 | [diff] [blame] | 1810 | |
Nick Lewycky | e9ea75e | 2011-01-19 15:56:12 +0000 | [diff] [blame] | 1811 | // sext(zext(x)) --> zext(x) |
| 1812 | if (const SCEVZeroExtendExpr *SZ = dyn_cast<SCEVZeroExtendExpr>(Op)) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1813 | return getZeroExtendExpr(SZ->getOperand(), Ty, Depth + 1); |
Nick Lewycky | e9ea75e | 2011-01-19 15:56:12 +0000 | [diff] [blame] | 1814 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1815 | // Before doing any expensive analysis, check to see if we've already |
| 1816 | // computed a SCEV for this Op and Ty. |
| 1817 | FoldingSetNodeID ID; |
| 1818 | ID.AddInteger(scSignExtend); |
| 1819 | ID.AddPointer(Op); |
| 1820 | ID.AddPointer(Ty); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1821 | void *IP = nullptr; |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1822 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1823 | // Limit recursion depth. |
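  // Past MaxExtDepth we give up on folding and simply record an opaque
  // sign-extend node.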
| 1824 | if (Depth > MaxExtDepth) { |
| 1825 | SCEV *S = new (SCEVAllocator) SCEVSignExtendExpr(ID.Intern(SCEVAllocator), |
| 1826 | Op, Ty); |
| 1827 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 1828 | addToLoopUseLists(S); |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1829 | return S; |
| 1830 | } |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 1831 | |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1832 | // sext(trunc(x)) --> sext(x) or x or trunc(x) |
| 1833 | if (const SCEVTruncateExpr *ST = dyn_cast<SCEVTruncateExpr>(Op)) { |
| 1834 | // It's possible the bits taken off by the truncate were all sign bits. If |
| 1835 | // so, we should be able to simplify this further. |
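    // For example, if X is an i32 known to lie in [-100, 100], then
    // sign-extending (trunc X to i8) out to i64 yields the same value as
    // sign-extending X itself: the truncate only removed redundant sign bits.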
| 1836 | const SCEV *X = ST->getOperand(); |
| 1837 | ConstantRange CR = getSignedRange(X); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1838 | unsigned TruncBits = getTypeSizeInBits(ST->getType()); |
| 1839 | unsigned NewBits = getTypeSizeInBits(Ty); |
| 1840 | if (CR.truncate(TruncBits).signExtend(NewBits).contains( |
Nick Lewycky | d4192f7 | 2011-01-23 20:06:05 +0000 | [diff] [blame] | 1841 | CR.sextOrTrunc(NewBits))) |
| 1842 | return getTruncateOrSignExtend(X, Ty); |
Nick Lewycky | bc98f5b | 2011-01-23 06:20:19 +0000 | [diff] [blame] | 1843 | } |
| 1844 | |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1845 |   // sext(C1 + (C2 * x)) --> C1 + sext(C2 * x) if 0 < C1 < C2 and C2 is a power of two
Sanjoy Das | 1195dbe | 2015-10-08 03:45:58 +0000 | [diff] [blame] | 1846 | if (auto *SA = dyn_cast<SCEVAddExpr>(Op)) { |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1847 | if (SA->getNumOperands() == 2) { |
Sanjoy Das | 1195dbe | 2015-10-08 03:45:58 +0000 | [diff] [blame] | 1848 | auto *SC1 = dyn_cast<SCEVConstant>(SA->getOperand(0)); |
| 1849 | auto *SMul = dyn_cast<SCEVMulExpr>(SA->getOperand(1)); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1850 | if (SMul && SC1) { |
Sanjoy Das | 1195dbe | 2015-10-08 03:45:58 +0000 | [diff] [blame] | 1851 | if (auto *SC2 = dyn_cast<SCEVConstant>(SMul->getOperand(0))) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 1852 | const APInt &C1 = SC1->getAPInt(); |
| 1853 | const APInt &C2 = SC2->getAPInt(); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1854 | if (C1.isStrictlyPositive() && C2.isStrictlyPositive() && |
Michael Zolotukhin | 265dfa4 | 2014-05-26 14:49:46 +0000 | [diff] [blame] | 1855 | C2.ugt(C1) && C2.isPowerOf2()) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1856 | return getAddExpr(getSignExtendExpr(SC1, Ty, Depth + 1), |
| 1857 | getSignExtendExpr(SMul, Ty, Depth + 1), |
| 1858 | SCEV::FlagAnyWrap, Depth + 1); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1859 | } |
| 1860 | } |
| 1861 | } |
Sanjoy Das | a060e60 | 2015-10-22 19:57:25 +0000 | [diff] [blame] | 1862 | |
| 1863 | // sext((A + B + ...)<nsw>) --> (sext(A) + sext(B) + ...)<nsw> |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 1864 | if (SA->hasNoSignedWrap()) { |
Sanjoy Das | a060e60 | 2015-10-22 19:57:25 +0000 | [diff] [blame] | 1865 | // If the addition does not sign overflow then we can, by definition, |
| 1866 | // commute the sign extension with the addition operation. |
| 1867 | SmallVector<const SCEV *, 4> Ops; |
| 1868 | for (const auto *Op : SA->operands()) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1869 | Ops.push_back(getSignExtendExpr(Op, Ty, Depth + 1)); |
| 1870 | return getAddExpr(Ops, SCEV::FlagNSW, Depth + 1); |
Sanjoy Das | a060e60 | 2015-10-22 19:57:25 +0000 | [diff] [blame] | 1871 | } |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 1872 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1873 | // If the input value is a chrec scev, and we can prove that the value |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1874 | // did not overflow the old, smaller, value, we can sign extend all of the |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1875 | // operands (often constants). This allows analysis of something like |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 1876 | // this: for (signed char X = 0; X < 100; ++X) { int Y = X; } |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 1877 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1878 | if (AR->isAffine()) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1879 | const SCEV *Start = AR->getStart(); |
| 1880 | const SCEV *Step = AR->getStepRecurrence(*this); |
| 1881 | unsigned BitWidth = getTypeSizeInBits(AR->getType()); |
| 1882 | const Loop *L = AR->getLoop(); |
| 1883 | |
Sanjoy Das | 724f5cf | 2016-03-03 18:31:29 +0000 | [diff] [blame] | 1884 | if (!AR->hasNoSignedWrap()) { |
| 1885 | auto NewFlags = proveNoWrapViaConstantRanges(AR); |
| 1886 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(NewFlags); |
| 1887 | } |
| 1888 | |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1889 | // If we have special knowledge that this addrec won't overflow, |
| 1890 | // we don't need to do any further analysis. |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 1891 | if (AR->hasNoSignedWrap()) |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1892 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1893 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this, Depth + 1), |
| 1894 | getSignExtendExpr(Step, Ty, Depth + 1), L, SCEV::FlagNSW); |
Dan Gohman | 62ef6a7 | 2009-07-25 01:22:26 +0000 | [diff] [blame] | 1895 | |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1896 | // Check whether the backedge-taken count is SCEVCouldNotCompute. |
| 1897 | // Note that this serves two purposes: It filters out loops that are |
| 1898 | // simply not analyzable, and it covers the case where this code is |
| 1899 | // being called from within backedge-taken count analysis, such that |
| 1900 | // attempting to ask for the backedge-taken count would likely result |
| 1901 |       // in infinite recursion. In the latter case, the analysis code will
| 1902 | // cope with a conservative value, and it will take care to purge |
| 1903 | // that value once it has finished. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1904 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(L); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1905 | if (!isa<SCEVCouldNotCompute>(MaxBECount)) { |
Dan Gohman | 95c5b0e | 2009-04-29 01:54:20 +0000 | [diff] [blame] | 1906 | // Manually compute the final value for AR, checking for |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1907 | // overflow. |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 1908 | |
| 1909 |       // Check whether the backedge-taken count can be losslessly cast to
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1910 | // the addrec's type. The count is always unsigned. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1911 | const SCEV *CastedMaxBECount = |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1912 | getTruncateOrZeroExtend(MaxBECount, Start->getType()); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 1913 | const SCEV *RecastedMaxBECount = |
Dan Gohman | 4fc3668 | 2009-05-18 15:58:39 +0000 | [diff] [blame] | 1914 | getTruncateOrZeroExtend(CastedMaxBECount, MaxBECount->getType()); |
| 1915 | if (MaxBECount == RecastedMaxBECount) { |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1916 | Type *WideTy = IntegerType::get(getContext(), BitWidth * 2); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 1917 | // Check whether Start+Step*MaxBECount has no signed overflow. |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1918 | const SCEV *SMul = getMulExpr(CastedMaxBECount, Step, |
| 1919 | SCEV::FlagAnyWrap, Depth + 1); |
| 1920 | const SCEV *SAdd = getSignExtendExpr(getAddExpr(Start, SMul, |
| 1921 | SCEV::FlagAnyWrap, |
| 1922 | Depth + 1), |
| 1923 | WideTy, Depth + 1); |
| 1924 | const SCEV *WideStart = getSignExtendExpr(Start, WideTy, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1925 | const SCEV *WideMaxBECount = |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1926 | getZeroExtendExpr(CastedMaxBECount, WideTy, Depth + 1); |
| 1927 | const SCEV *OperandExtendedAdd = |
| 1928 | getAddExpr(WideStart, |
| 1929 | getMulExpr(WideMaxBECount, |
| 1930 | getSignExtendExpr(Step, WideTy, Depth + 1), |
| 1931 | SCEV::FlagAnyWrap, Depth + 1), |
| 1932 | SCEV::FlagAnyWrap, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1933 | if (SAdd == OperandExtendedAdd) { |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1934 | // Cache knowledge of AR NSW, which is propagated to this AddRec. |
| 1935 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
Dan Gohman | 494dac3 | 2009-04-29 22:28:28 +0000 | [diff] [blame] | 1936 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1937 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1938 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this, |
| 1939 | Depth + 1), |
| 1940 | getSignExtendExpr(Step, Ty, Depth + 1), L, |
Wei Mi | 8c40533 | 2017-04-17 20:40:05 +0000 | [diff] [blame] | 1941 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1942 | } |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1943 | // Similar to above, only this time treat the step value as unsigned. |
| 1944 | // This covers loops that count up with an unsigned step. |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1945 | OperandExtendedAdd = |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1946 | getAddExpr(WideStart, |
| 1947 | getMulExpr(WideMaxBECount, |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1948 | getZeroExtendExpr(Step, WideTy, Depth + 1), |
| 1949 | SCEV::FlagAnyWrap, Depth + 1), |
| 1950 | SCEV::FlagAnyWrap, Depth + 1); |
Nuno Lopes | c2a170e | 2012-05-15 20:20:14 +0000 | [diff] [blame] | 1951 | if (SAdd == OperandExtendedAdd) { |
Sanjoy Das | bf5d870 | 2015-02-09 18:34:55 +0000 | [diff] [blame] | 1952 | // If AR wraps around then |
| 1953 | // |
| 1954 | // abs(Step) * MaxBECount > unsigned-max(AR->getType()) |
| 1955 | // => SAdd != OperandExtendedAdd |
| 1956 | // |
| 1957 | // Thus (AR is not NW => SAdd != OperandExtendedAdd) <=> |
| 1958 | // (SAdd == OperandExtendedAdd => AR is NW) |
| 1959 | |
| 1960 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW); |
| 1961 | |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 1962 | // Return the expression with the addrec on the outside. |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1963 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1964 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this, |
| 1965 | Depth + 1), |
| 1966 | getZeroExtendExpr(Step, Ty, Depth + 1), L, |
| 1967 | AR->getNoWrapFlags()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 1968 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1969 | } |
Sanjoy Das | 787c246 | 2016-05-11 17:41:26 +0000 | [diff] [blame] | 1970 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 1971 | |
Sanjoy Das | 787c246 | 2016-05-11 17:41:26 +0000 | [diff] [blame] | 1972 | // Normally, in the cases we can prove no-overflow via a |
| 1973 | // backedge guarding condition, we can also compute a backedge |
| 1974 | // taken count for the loop. The exceptions are assumptions and |
| 1975 | // guards present in the loop -- SCEV is not great at exploiting |
| 1976 | // these to compute max backedge taken counts, but can still use |
| 1977 | // these to prove lack of overflow. Use this fact to avoid |
| 1978 | // doing extra work that may not pay off. |
| 1979 | |
| 1980 | if (!isa<SCEVCouldNotCompute>(MaxBECount) || HasGuards || |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1981 | !AC.assumptions().empty()) { |
Sanjoy Das | 787c246 | 2016-05-11 17:41:26 +0000 | [diff] [blame] | 1982 | // If the backedge is guarded by a comparison with the pre-inc |
| 1983 | // value the addrec is safe. Also, if the entry is guarded by |
| 1984 | // a comparison with the start value and the backedge is |
| 1985 | // guarded by a comparison with the post-inc value, the addrec |
| 1986 | // is safe. |
Andrew Trick | 812276e | 2011-05-31 21:17:47 +0000 | [diff] [blame] | 1987 | ICmpInst::Predicate Pred; |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1988 | const SCEV *OverflowLimit = |
| 1989 | getSignedOverflowLimitForStep(Step, &Pred, this); |
Andrew Trick | 812276e | 2011-05-31 21:17:47 +0000 | [diff] [blame] | 1990 | if (OverflowLimit && |
| 1991 | (isLoopBackedgeGuardedByCond(L, Pred, AR, OverflowLimit) || |
| 1992 | (isLoopEntryGuardedByCond(L, Pred, Start, OverflowLimit) && |
| 1993 | isLoopBackedgeGuardedByCond(L, Pred, AR->getPostIncExpr(*this), |
| 1994 | OverflowLimit)))) { |
| 1995 | // Cache knowledge of AR NSW, then propagate NSW to the wide AddRec. |
| 1996 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
Sanjoy Das | 4153f47 | 2015-02-18 01:47:07 +0000 | [diff] [blame] | 1997 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 1998 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this, Depth + 1), |
| 1999 | getSignExtendExpr(Step, Ty, Depth + 1), L, AR->getNoWrapFlags()); |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 2000 | } |
| 2001 | } |
Sanjoy Das | 787c246 | 2016-05-11 17:41:26 +0000 | [diff] [blame] | 2002 | |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 2003 | // If Start and Step are constants, check if we can apply this |
| 2004 | // transformation: |
| 2005 |       // sext{C1,+,C2} --> C1 + sext{0,+,C2} if 0 < C1 < C2 and C2 is a power of two
Sanjoy Das | 1195dbe | 2015-10-08 03:45:58 +0000 | [diff] [blame] | 2006 | auto *SC1 = dyn_cast<SCEVConstant>(Start); |
| 2007 | auto *SC2 = dyn_cast<SCEVConstant>(Step); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 2008 | if (SC1 && SC2) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2009 | const APInt &C1 = SC1->getAPInt(); |
| 2010 | const APInt &C2 = SC2->getAPInt(); |
Michael Zolotukhin | 265dfa4 | 2014-05-26 14:49:46 +0000 | [diff] [blame] | 2011 | if (C1.isStrictlyPositive() && C2.isStrictlyPositive() && C2.ugt(C1) && |
| 2012 | C2.isPowerOf2()) { |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 2013 | Start = getSignExtendExpr(Start, Ty, Depth + 1); |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 2014 | const SCEV *NewAR = getAddRecExpr(getZero(AR->getType()), Step, L, |
| 2015 | AR->getNoWrapFlags()); |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 2016 | return getAddExpr(Start, getSignExtendExpr(NewAR, Ty, Depth + 1), |
| 2017 | SCEV::FlagAnyWrap, Depth + 1); |
Michael Zolotukhin | d4c7246 | 2014-05-24 08:09:57 +0000 | [diff] [blame] | 2018 | } |
| 2019 | } |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 2020 | |
| 2021 | if (proveNoWrapByVaryingStart<SCEVSignExtendExpr>(Start, Step, L)) { |
| 2022 | const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNSW); |
| 2023 | return getAddRecExpr( |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 2024 | getExtendAddRecStart<SCEVSignExtendExpr>(AR, Ty, this, Depth + 1), |
| 2025 | getSignExtendExpr(Step, Ty, Depth + 1), L, AR->getNoWrapFlags()); |
Sanjoy Das | 9e2c501 | 2015-03-04 22:24:17 +0000 | [diff] [blame] | 2026 | } |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 2027 | } |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 2028 | |
Sanjoy Das | 11ef606 | 2016-03-03 18:31:23 +0000 | [diff] [blame] | 2029 |   // If the input value is provably non-negative and we could not simplify
| 2030 |   // away the sext, build a zext instead.
| 2031 | if (isKnownNonNegative(Op)) |
Max Kazantsev | 8d0322e | 2017-06-30 05:04:09 +0000 | [diff] [blame] | 2032 | return getZeroExtendExpr(Op, Ty, Depth + 1); |
Sanjoy Das | 11ef606 | 2016-03-03 18:31:23 +0000 | [diff] [blame] | 2033 | |
Dan Gohman | 74a0ba1 | 2009-07-13 20:55:53 +0000 | [diff] [blame] | 2034 | // The cast wasn't folded; create an explicit cast node. |
| 2035 | // Recompute the insert position, as it may have been invalidated. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2036 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 2037 | SCEV *S = new (SCEVAllocator) SCEVSignExtendExpr(ID.Intern(SCEVAllocator), |
| 2038 | Op, Ty); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2039 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 2040 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2041 | return S; |
Dan Gohman | cb9e09a | 2007-06-15 14:38:12 +0000 | [diff] [blame] | 2042 | } |
| 2043 | |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2044 | /// getAnyExtendExpr - Return a SCEV for the given operand extended with |
| 2045 | /// unspecified bits out to the given type. |
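/// Because the extra bits are unspecified, we are free to return whichever of
/// the zero-extended, sign-extended, or truncate-peeled forms folds best.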
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2046 | const SCEV *ScalarEvolution::getAnyExtendExpr(const SCEV *Op, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2047 | Type *Ty) { |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2048 | assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) && |
| 2049 | "This is not an extending conversion!"); |
| 2050 | assert(isSCEVable(Ty) && |
| 2051 | "This is not a conversion to a SCEVable type!"); |
| 2052 | Ty = getEffectiveSCEVType(Ty); |
| 2053 | |
| 2054 | // Sign-extend negative constants. |
| 2055 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(Op)) |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2056 | if (SC->getAPInt().isNegative()) |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2057 | return getSignExtendExpr(Op, Ty); |
| 2058 | |
| 2059 | // Peel off a truncate cast. |
| 2060 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(Op)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2061 | const SCEV *NewOp = T->getOperand(); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2062 | if (getTypeSizeInBits(NewOp->getType()) < getTypeSizeInBits(Ty)) |
| 2063 | return getAnyExtendExpr(NewOp, Ty); |
| 2064 | return getTruncateOrNoop(NewOp, Ty); |
| 2065 | } |
| 2066 | |
| 2067 | // Next try a zext cast. If the cast is folded, use it. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2068 | const SCEV *ZExt = getZeroExtendExpr(Op, Ty); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2069 | if (!isa<SCEVZeroExtendExpr>(ZExt)) |
| 2070 | return ZExt; |
| 2071 | |
| 2072 | // Next try a sext cast. If the cast is folded, use it. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2073 | const SCEV *SExt = getSignExtendExpr(Op, Ty); |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2074 | if (!isa<SCEVSignExtendExpr>(SExt)) |
| 2075 | return SExt; |
| 2076 | |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2077 | // Force the cast to be folded into the operands of an addrec. |
| 2078 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Op)) { |
| 2079 | SmallVector<const SCEV *, 4> Ops; |
Tobias Grosser | 924221c | 2014-05-07 06:07:47 +0000 | [diff] [blame] | 2080 | for (const SCEV *Op : AR->operands()) |
| 2081 | Ops.push_back(getAnyExtendExpr(Op, Ty)); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2082 | return getAddRecExpr(Ops, AR->getLoop(), SCEV::FlagNW); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2083 | } |
| 2084 | |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 2085 | // If the expression is obviously signed, use the sext cast value. |
| 2086 | if (isa<SCEVSMaxExpr>(Op)) |
| 2087 | return SExt; |
| 2088 | |
| 2089 | // Absent any other information, use the zext cast value. |
| 2090 | return ZExt; |
| 2091 | } |
| 2092 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 2093 | /// Process the given Ops list, which is a list of operands to be added under |
| 2094 | /// the given scale, and update the given map. This is a helper function for
| 2095 | /// getAddExpr. As an example of what it does, given a sequence of operands
| 2096 | /// that would form an add expression like this: |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2097 | /// |
Tobias Grosser | ba49e42 | 2014-03-05 10:37:17 +0000 | [diff] [blame] | 2098 | /// m + n + 13 + (A * (o + p + (B * (q + m + 29)))) + r + (-1 * r) |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2099 | /// |
| 2100 | /// where A and B are constants, update the map with these values: |
| 2101 | /// |
| 2102 | /// (m, 1+A*B), (n, 1), (o, A), (p, A), (q, A*B), (r, 0) |
| 2103 | /// |
| 2104 | /// and add 13 + A*B*29 to AccumulatedConstant. |
| 2105 | /// This will allow getAddExpr to produce this:
| 2106 | /// |
| 2107 | /// 13+A*B*29 + n + (m * (1+A*B)) + ((o + p) * A) + (q * A*B) |
| 2108 | /// |
| 2109 | /// This form often exposes folding opportunities that are hidden in |
| 2110 | /// the original operand list. |
| 2111 | /// |
Sylvestre Ledru | 91ce36c | 2012-09-27 10:14:43 +0000 | [diff] [blame] | 2112 | /// Return true iff it appears that any interesting folding opportunities |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2113 | /// may be exposed. This helps getAddRecExpr short-circuit extra work in |
| 2114 | /// the common case where no interesting opportunities are present, and |
| 2115 | /// is also used as a check to avoid infinite recursion. |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2116 | static bool |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2117 | CollectAddOperandsWithScales(DenseMap<const SCEV *, APInt> &M, |
Craig Topper | 2cd5ff8 | 2013-07-11 16:22:38 +0000 | [diff] [blame] | 2118 | SmallVectorImpl<const SCEV *> &NewOps, |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2119 | APInt &AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2120 | const SCEV *const *Ops, size_t NumOperands, |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2121 | const APInt &Scale, |
| 2122 | ScalarEvolution &SE) { |
| 2123 | bool Interesting = false; |
| 2124 | |
Dan Gohman | 4507304 | 2010-06-18 19:12:32 +0000 | [diff] [blame] | 2125 | // Iterate over the add operands. They are sorted, with constants first. |
| 2126 | unsigned i = 0; |
| 2127 | while (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[i])) { |
| 2128 | ++i; |
| 2129 | // Pull a buried constant out to the outside. |
| 2130 | if (Scale != 1 || AccumulatedConstant != 0 || C->getValue()->isZero()) |
| 2131 | Interesting = true; |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2132 | AccumulatedConstant += Scale * C->getAPInt(); |
Dan Gohman | 4507304 | 2010-06-18 19:12:32 +0000 | [diff] [blame] | 2133 | } |
| 2134 | |
| 2135 | // Next comes everything else. We're especially interested in multiplies |
| 2136 | // here, but they're in the middle, so just visit the rest with one loop. |
| 2137 | for (; i != NumOperands; ++i) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2138 | const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Ops[i]); |
| 2139 | if (Mul && isa<SCEVConstant>(Mul->getOperand(0))) { |
| 2140 | APInt NewScale = |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2141 | Scale * cast<SCEVConstant>(Mul->getOperand(0))->getAPInt(); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2142 | if (Mul->getNumOperands() == 2 && isa<SCEVAddExpr>(Mul->getOperand(1))) { |
| 2143 | // A multiplication of a constant with another add; recurse. |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2144 | const SCEVAddExpr *Add = cast<SCEVAddExpr>(Mul->getOperand(1)); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2145 | Interesting |= |
| 2146 | CollectAddOperandsWithScales(M, NewOps, AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2147 | Add->op_begin(), Add->getNumOperands(), |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2148 | NewScale, SE); |
| 2149 | } else { |
| 2150 | // A multiplication of a constant with some other value. Update |
| 2151 | // the map. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2152 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin()+1, Mul->op_end()); |
| 2153 | const SCEV *Key = SE.getMulExpr(MulOps); |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 2154 | auto Pair = M.insert({Key, NewScale}); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2155 | if (Pair.second) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2156 | NewOps.push_back(Pair.first->first); |
| 2157 | } else { |
| 2158 | Pair.first->second += NewScale; |
| 2159 | // The map already had an entry for this value, which may indicate |
| 2160 | // a folding opportunity. |
| 2161 | Interesting = true; |
| 2162 | } |
| 2163 | } |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2164 | } else { |
| 2165 | // An ordinary operand. Update the map. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2166 | std::pair<DenseMap<const SCEV *, APInt>::iterator, bool> Pair = |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 2167 | M.insert({Ops[i], Scale}); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2168 | if (Pair.second) { |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2169 | NewOps.push_back(Pair.first->first); |
| 2170 | } else { |
| 2171 | Pair.first->second += Scale; |
| 2172 | // The map already had an entry for this value, which may indicate |
| 2173 | // a folding opportunity. |
| 2174 | Interesting = true; |
| 2175 | } |
| 2176 | } |
| 2177 | } |
| 2178 | |
| 2179 | return Interesting; |
| 2180 | } |
| 2181 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2182 | // We're trying to construct a SCEV of type `Type' with `Ops' as operands and |
| 2183 | // `Flags' as can't-wrap behavior. Infer a more aggressive set of
| 2184 | // can't-overflow flags for the operation if possible. |
| 2185 | static SCEV::NoWrapFlags |
| 2186 | StrengthenNoWrapFlags(ScalarEvolution *SE, SCEVTypes Type, |
| 2187 | const SmallVectorImpl<const SCEV *> &Ops, |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2188 | SCEV::NoWrapFlags Flags) { |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2189 | using namespace std::placeholders; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 2190 | |
| 2191 | using OBO = OverflowingBinaryOperator; |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2192 | |
| 2193 | bool CanAnalyze = |
| 2194 | Type == scAddExpr || Type == scAddRecExpr || Type == scMulExpr; |
| 2195 | (void)CanAnalyze; |
| 2196 | assert(CanAnalyze && "don't call from other places!"); |
| 2197 | |
| 2198 | int SignOrUnsignMask = SCEV::FlagNUW | SCEV::FlagNSW; |
| 2199 | SCEV::NoWrapFlags SignOrUnsignWrap = |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2200 | ScalarEvolution::maskFlags(Flags, SignOrUnsignMask); |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2201 | |
| 2202 | // If FlagNSW is true and all the operands are non-negative, infer FlagNUW. |
Sanjoy Das | 9b0015f | 2015-11-29 23:40:57 +0000 | [diff] [blame] | 2203 | auto IsKnownNonNegative = [&](const SCEV *S) { |
| 2204 | return SE->isKnownNonNegative(S); |
| 2205 | }; |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2206 | |
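  // (When every operand is non-negative, the signed and unsigned
  // interpretations of the result coincide, so an operation that cannot wrap
  // in the signed sense cannot wrap in the unsigned sense either.)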
Sanjoy Das | 3b827c7 | 2015-11-29 23:40:53 +0000 | [diff] [blame] | 2207 | if (SignOrUnsignWrap == SCEV::FlagNSW && all_of(Ops, IsKnownNonNegative)) |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2208 | Flags = |
| 2209 | ScalarEvolution::setFlags(Flags, (SCEV::NoWrapFlags)SignOrUnsignMask); |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2210 | |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2211 | SignOrUnsignWrap = ScalarEvolution::maskFlags(Flags, SignOrUnsignMask); |
| 2212 | |
| 2213 | if (SignOrUnsignWrap != SignOrUnsignMask && Type == scAddExpr && |
| 2214 | Ops.size() == 2 && isa<SCEVConstant>(Ops[0])) { |
| 2215 | |
| 2216 | // (A + C) --> (A + C)<nsw> if the addition does not sign overflow |
| 2217 |     // (A + C) --> (A + C)<nuw> if the addition does not overflow unsigned
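    // For example, with i8 operands and C == 1, the no-signed-wrap region for
    // A is [-128, 127): only A == 127 can make A + 1 overflow in the signed
    // sense.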
| 2218 | |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2219 | const APInt &C = cast<SCEVConstant>(Ops[0])->getAPInt(); |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2220 | if (!(SignOrUnsignWrap & SCEV::FlagNSW)) { |
Sanjoy Das | 5079f62 | 2016-02-22 16:13:02 +0000 | [diff] [blame] | 2221 | auto NSWRegion = ConstantRange::makeGuaranteedNoWrapRegion( |
| 2222 | Instruction::Add, C, OBO::NoSignedWrap); |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2223 | if (NSWRegion.contains(SE->getSignedRange(Ops[1]))) |
| 2224 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNSW); |
| 2225 | } |
| 2226 | if (!(SignOrUnsignWrap & SCEV::FlagNUW)) { |
Sanjoy Das | 5079f62 | 2016-02-22 16:13:02 +0000 | [diff] [blame] | 2227 | auto NUWRegion = ConstantRange::makeGuaranteedNoWrapRegion( |
| 2228 | Instruction::Add, C, OBO::NoUnsignedWrap); |
Sanjoy Das | 8f27415 | 2015-10-22 19:57:19 +0000 | [diff] [blame] | 2229 | if (NUWRegion.contains(SE->getUnsignedRange(Ops[1]))) |
| 2230 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNUW); |
| 2231 | } |
| 2232 | } |
| 2233 | |
| 2234 | return Flags; |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 2235 | } |
| 2236 | |
Max Kazantsev | d8fe3eb | 2017-05-30 10:54:58 +0000 | [diff] [blame] | 2237 | bool ScalarEvolution::isAvailableAtLoopEntry(const SCEV *S, const Loop *L) { |
Max Kazantsev | 4145032 | 2017-05-26 06:47:04 +0000 | [diff] [blame] | 2238 | if (!isLoopInvariant(S, L)) |
| 2239 | return false; |
| 2240 | // If a value depends on a SCEVUnknown which is defined after the loop, we |
| 2241 | // conservatively assume that we cannot calculate it at the loop's entry. |
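  // Concretely, the visitor below flags any SCEVUnknown whose defining
  // instruction is dominated by the loop header, since such a value cannot be
  // computed before the loop is entered.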
| 2242 | struct FindDominatedSCEVUnknown { |
| 2243 | bool Found = false; |
| 2244 | const Loop *L; |
| 2245 | DominatorTree &DT; |
| 2246 | LoopInfo &LI; |
| 2247 | |
| 2248 | FindDominatedSCEVUnknown(const Loop *L, DominatorTree &DT, LoopInfo &LI) |
| 2249 | : L(L), DT(DT), LI(LI) {} |
| 2250 | |
| 2251 | bool checkSCEVUnknown(const SCEVUnknown *SU) { |
| 2252 | if (auto *I = dyn_cast<Instruction>(SU->getValue())) { |
| 2253 | if (DT.dominates(L->getHeader(), I->getParent())) |
| 2254 | Found = true; |
| 2255 | else |
| 2256 | assert(DT.dominates(I->getParent(), L->getHeader()) && |
| 2257 | "No dominance relationship between SCEV and loop?"); |
| 2258 | } |
| 2259 | return false; |
| 2260 | } |
| 2261 | |
| 2262 | bool follow(const SCEV *S) { |
| 2263 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
| 2264 | case scConstant: |
| 2265 | return false; |
| 2266 | case scAddRecExpr: |
| 2267 | case scTruncate: |
| 2268 | case scZeroExtend: |
| 2269 | case scSignExtend: |
| 2270 | case scAddExpr: |
| 2271 | case scMulExpr: |
| 2272 | case scUMaxExpr: |
| 2273 | case scSMaxExpr: |
| 2274 | case scUDivExpr: |
| 2275 | return true; |
| 2276 | case scUnknown: |
| 2277 | return checkSCEVUnknown(cast<SCEVUnknown>(S)); |
| 2278 | case scCouldNotCompute: |
| 2279 | llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); |
| 2280 | } |
| 2281 | return false; |
| 2282 | } |
| 2283 | |
| 2284 | bool isDone() { return Found; } |
| 2285 | }; |
| 2286 | |
| 2287 | FindDominatedSCEVUnknown FSU(L, DT, LI); |
| 2288 | SCEVTraversal<FindDominatedSCEVUnknown> ST(FSU); |
| 2289 | ST.visitAll(S); |
| 2290 | return !FSU.Found; |
| 2291 | } |
| 2292 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 2293 | /// Get a canonical add expression, or something simpler if possible. |
Dan Gohman | 816fe0a | 2009-10-09 00:10:36 +0000 | [diff] [blame] | 2294 | const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops, |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2295 | SCEV::NoWrapFlags Flags, |
| 2296 | unsigned Depth) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2297 | assert(!(Flags & ~(SCEV::FlagNUW | SCEV::FlagNSW)) && |
| 2298 | "only nuw or nsw allowed"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2299 | assert(!Ops.empty() && "Cannot get empty add!"); |
Chris Lattner | 74498e1 | 2004-04-07 16:16:11 +0000 | [diff] [blame] | 2300 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2301 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2302 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2303 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | 9136d9f | 2010-06-18 19:09:27 +0000 | [diff] [blame] | 2304 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2305 | "SCEVAddExpr operand types don't match!"); |
| 2306 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2307 | |
| 2308 | // Sort by complexity, this groups all similar expression types together. |
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 2309 | GroupByComplexity(Ops, &LI, DT); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2310 | |
Sanjoy Das | 6489561 | 2015-10-09 02:44:45 +0000 | [diff] [blame] | 2311 | Flags = StrengthenNoWrapFlags(this, scAddExpr, Ops, Flags); |
| 2312 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2313 | // If there are any constants, fold them together. |
| 2314 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2315 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2316 | ++Idx; |
Chris Lattner | 74498e1 | 2004-04-07 16:16:11 +0000 | [diff] [blame] | 2317 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2318 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2319 | // We found two constants, fold them together! |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2320 | Ops[0] = getConstant(LHSC->getAPInt() + RHSC->getAPInt()); |
Dan Gohman | 011cf68 | 2009-06-14 22:53:57 +0000 | [diff] [blame] | 2321 | if (Ops.size() == 2) return Ops[0]; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 2322 | Ops.erase(Ops.begin()+1); // Erase the folded element |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 2323 | LHSC = cast<SCEVConstant>(Ops[0]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2324 | } |
| 2325 | |
| 2326 | // If we are left with a constant zero being added, strip it off. |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 2327 | if (LHSC->getValue()->isZero()) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2328 | Ops.erase(Ops.begin()); |
| 2329 | --Idx; |
| 2330 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2331 | |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 2332 | if (Ops.size() == 1) return Ops[0]; |
| 2333 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 2334 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2335 |   // Limit the recursion depth.
| 2336 | if (Depth > MaxArithDepth) |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2337 | return getOrCreateAddExpr(Ops, Flags); |
| 2338 | |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2339 | // Okay, check to see if the same value occurs in the operand list more than |
Reid Kleckner | 30422ee | 2016-12-12 18:52:32 +0000 | [diff] [blame] | 2340 |   // once. If so, merge them together into a multiply expression. Since we
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2341 | // sorted the list, these values are required to be adjacent. |
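  // For example, once sorted, (x + y + y + y) is folded here into x + y*3.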
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2342 | Type *Ty = Ops[0]->getType(); |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2343 | bool FoundMatch = false; |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2344 | for (unsigned i = 0, e = Ops.size(); i != e-1; ++i) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2345 | if (Ops[i] == Ops[i+1]) { // X + Y + Y --> X + Y*2 |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2346 | // Scan ahead to count how many equal operands there are. |
| 2347 | unsigned Count = 2; |
| 2348 | while (i+Count != e && Ops[i+Count] == Ops[i]) |
| 2349 | ++Count; |
| 2350 | // Merge the values into a multiply. |
| 2351 | const SCEV *Scale = getConstant(Ty, Count); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2352 | const SCEV *Mul = getMulExpr(Scale, Ops[i], SCEV::FlagAnyWrap, Depth + 1); |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2353 | if (Ops.size() == Count) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2354 | return Mul; |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2355 | Ops[i] = Mul; |
Dan Gohman | 15871f2 | 2010-08-27 21:39:59 +0000 | [diff] [blame] | 2356 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+Count); |
Dan Gohman | fe22f1d | 2010-08-28 00:39:27 +0000 | [diff] [blame] | 2357 | --i; e -= Count - 1; |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2358 | FoundMatch = true; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2359 | } |
Dan Gohman | e67b287 | 2010-08-12 14:46:54 +0000 | [diff] [blame] | 2360 | if (FoundMatch) |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2361 | return getAddExpr(Ops, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2362 | |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2363 | // Check for truncates. If all the operands are truncated from the same |
| 2364 | // type, see if factoring out the truncate would permit the result to be |
Daniel Neilson | 1341ac2 | 2017-09-22 15:47:57 +0000 | [diff] [blame] | 2365 |   // folded. E.g., n*trunc(x) + m*trunc(y) --> trunc(trunc(n)*x + trunc(m)*y)
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2366 | // if the contents of the resulting outer trunc fold to something simple. |
Daniel Neilson | 1341ac2 | 2017-09-22 15:47:57 +0000 | [diff] [blame] | 2367 | auto FindTruncSrcType = [&]() -> Type * { |
| 2368 |     // We're ultimately looking to fold an add of truncs and muls of only
| 2369 |     // constants and truncs, so if we find any other types of SCEV
| 2370 |     // as operands of the add then we bail and return nullptr here.
| 2371 | // Otherwise, we return the type of the operand of a trunc that we find. |
| 2372 | if (auto *T = dyn_cast<SCEVTruncateExpr>(Ops[Idx])) |
| 2373 | return T->getOperand()->getType(); |
| 2374 | if (const auto *Mul = dyn_cast<SCEVMulExpr>(Ops[Idx])) { |
| 2375 | const auto *LastOp = Mul->getOperand(Mul->getNumOperands() - 1); |
| 2376 | if (const auto *T = dyn_cast<SCEVTruncateExpr>(LastOp)) |
| 2377 | return T->getOperand()->getType(); |
| 2378 | } |
| 2379 | return nullptr; |
| 2380 | }; |
| 2381 | if (auto *SrcType = FindTruncSrcType()) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2382 | SmallVector<const SCEV *, 8> LargeOps; |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2383 | bool Ok = true; |
| 2384 | // Check all the operands to see if they can be represented in the |
| 2385 | // source type of the truncate. |
| 2386 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) { |
| 2387 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(Ops[i])) { |
| 2388 | if (T->getOperand()->getType() != SrcType) { |
| 2389 | Ok = false; |
| 2390 | break; |
| 2391 | } |
| 2392 | LargeOps.push_back(T->getOperand()); |
| 2393 | } else if (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[i])) { |
Dan Gohman | ff3174e | 2010-04-23 01:51:29 +0000 | [diff] [blame] | 2394 | LargeOps.push_back(getAnyExtendExpr(C, SrcType)); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2395 | } else if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(Ops[i])) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2396 | SmallVector<const SCEV *, 8> LargeMulOps; |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2397 | for (unsigned j = 0, f = M->getNumOperands(); j != f && Ok; ++j) { |
| 2398 | if (const SCEVTruncateExpr *T = |
| 2399 | dyn_cast<SCEVTruncateExpr>(M->getOperand(j))) { |
| 2400 | if (T->getOperand()->getType() != SrcType) { |
| 2401 | Ok = false; |
| 2402 | break; |
| 2403 | } |
| 2404 | LargeMulOps.push_back(T->getOperand()); |
Sanjoy Das | 6391459 | 2015-10-18 00:29:20 +0000 | [diff] [blame] | 2405 | } else if (const auto *C = dyn_cast<SCEVConstant>(M->getOperand(j))) { |
Dan Gohman | ff3174e | 2010-04-23 01:51:29 +0000 | [diff] [blame] | 2406 | LargeMulOps.push_back(getAnyExtendExpr(C, SrcType)); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2407 | } else { |
| 2408 | Ok = false; |
| 2409 | break; |
| 2410 | } |
| 2411 | } |
| 2412 | if (Ok) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2413 | LargeOps.push_back(getMulExpr(LargeMulOps, SCEV::FlagAnyWrap, Depth + 1)); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2414 | } else { |
| 2415 | Ok = false; |
| 2416 | break; |
| 2417 | } |
| 2418 | } |
| 2419 | if (Ok) { |
| 2420 | // Evaluate the expression in the larger type. |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2421 | const SCEV *Fold = getAddExpr(LargeOps, Flags, Depth + 1); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2422 | // If it folds to something simple, use it. Otherwise, don't. |
| 2423 | if (isa<SCEVConstant>(Fold) || isa<SCEVUnknown>(Fold)) |
Daniel Neilson | 1341ac2 | 2017-09-22 15:47:57 +0000 | [diff] [blame] | 2424 | return getTruncateExpr(Fold, Ty); |
Dan Gohman | 2e55cc5 | 2009-05-08 21:03:19 +0000 | [diff] [blame] | 2425 | } |
| 2426 | } |
| 2427 | |
| 2428 | // Skip past any other cast SCEVs. |
Dan Gohman | eed125f | 2007-06-18 19:30:09 +0000 | [diff] [blame] | 2429 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddExpr) |
| 2430 | ++Idx; |
| 2431 | |
| 2432 | // If there are add operands they would be next. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2433 | if (Idx < Ops.size()) { |
| 2434 | bool DeletedAdd = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2435 | while (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[Idx])) { |
Daniil Fukalov | b09dac5 | 2017-01-26 13:33:17 +0000 | [diff] [blame] | 2436 | if (Ops.size() > AddOpsInlineThreshold || |
| 2437 | Add->getNumOperands() > AddOpsInlineThreshold) |
| 2438 | break; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2439 | // If we have an add, expand the add operands onto the end of the operands |
| 2440 | // list. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2441 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 2442 | Ops.append(Add->op_begin(), Add->op_end()); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2443 | DeletedAdd = true; |
| 2444 | } |
| 2445 | |
| 2446 | // If we deleted at least one add, we added operands to the end of the list, |
| 2447 |     // and they are not necessarily sorted. Recurse to re-sort and re-simplify
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 2448 | // any operands we just acquired. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2449 | if (DeletedAdd) |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2450 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2451 | } |
| 2452 | |
| 2453 | // Skip over the add expression until we get to a multiply. |
| 2454 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scMulExpr) |
| 2455 | ++Idx; |
| 2456 | |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2457 | // Check to see if there are any folding opportunities present with |
| 2458 | // operands multiplied by constant values. |
| 2459 | if (Idx < Ops.size() && isa<SCEVMulExpr>(Ops[Idx])) { |
| 2460 | uint64_t BitWidth = getTypeSizeInBits(Ty); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2461 | DenseMap<const SCEV *, APInt> M; |
| 2462 | SmallVector<const SCEV *, 8> NewOps; |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2463 | APInt AccumulatedConstant(BitWidth, 0); |
| 2464 | if (CollectAddOperandsWithScales(M, NewOps, AccumulatedConstant, |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2465 | Ops.data(), Ops.size(), |
| 2466 | APInt(BitWidth, 1), *this)) { |
Sanjoy Das | 7d75267 | 2015-12-08 04:32:54 +0000 | [diff] [blame] | 2467 | struct APIntCompare { |
| 2468 | bool operator()(const APInt &LHS, const APInt &RHS) const { |
| 2469 | return LHS.ult(RHS); |
| 2470 | } |
| 2471 | }; |
| 2472 | |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2473 | // Some interesting folding opportunity is present, so it's worthwhile to
| 2474 | // re-generate the operands list. Group the operands by constant scale, |
| 2475 | // to avoid multiplying by the same constant scale multiple times. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2476 | std::map<APInt, SmallVector<const SCEV *, 4>, APIntCompare> MulOpLists; |
Sanjoy Das | f25d25a | 2015-10-31 23:21:32 +0000 | [diff] [blame] | 2477 | for (const SCEV *NewOp : NewOps) |
| 2478 | MulOpLists[M.find(NewOp)->second].push_back(NewOp); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2479 | // Re-generate the operands list. |
| 2480 | Ops.clear(); |
| 2481 | if (AccumulatedConstant != 0) |
| 2482 | Ops.push_back(getConstant(AccumulatedConstant)); |
Sanjoy Das | f25d25a | 2015-10-31 23:21:32 +0000 | [diff] [blame] | 2483 | for (auto &MulOp : MulOpLists) |
| 2484 | if (MulOp.first != 0) |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2485 | Ops.push_back(getMulExpr( |
| 2486 | getConstant(MulOp.first), |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2487 | getAddExpr(MulOp.second, SCEV::FlagAnyWrap, Depth + 1), |
| 2488 | SCEV::FlagAnyWrap, Depth + 1)); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2489 | if (Ops.empty()) |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 2490 | return getZero(Ty); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2491 | if (Ops.size() == 1) |
| 2492 | return Ops[0]; |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2493 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Dan Gohman | 038d02e | 2009-06-14 22:58:51 +0000 | [diff] [blame] | 2494 | } |
| 2495 | } |
| 2496 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2497 | // If we are adding something to a multiply expression, make sure the |
| 2498 | // something is not already an operand of the multiply. If so, merge it into |
| 2499 | // the multiply. |
| 2500 | for (; Idx < Ops.size() && isa<SCEVMulExpr>(Ops[Idx]); ++Idx) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2501 | const SCEVMulExpr *Mul = cast<SCEVMulExpr>(Ops[Idx]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2502 | for (unsigned MulOp = 0, e = Mul->getNumOperands(); MulOp != e; ++MulOp) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2503 | const SCEV *MulOpSCEV = Mul->getOperand(MulOp); |
Dan Gohman | 157847f | 2010-08-12 14:52:55 +0000 | [diff] [blame] | 2504 | if (isa<SCEVConstant>(MulOpSCEV)) |
| 2505 | continue; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2506 | for (unsigned AddOp = 0, e = Ops.size(); AddOp != e; ++AddOp) |
Dan Gohman | 157847f | 2010-08-12 14:52:55 +0000 | [diff] [blame] | 2507 | if (MulOpSCEV == Ops[AddOp]) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2508 | // Fold W + X + (X * Y * Z) --> W + (X * ((Y*Z)+1)) |
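// For example, a + b + (3*b) folds to a + (4*b).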
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2509 | const SCEV *InnerMul = Mul->getOperand(MulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2510 | if (Mul->getNumOperands() != 2) { |
| 2511 | // If the multiply has more than two operands, we must get the |
| 2512 | // Y*Z term. |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2513 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin(), |
| 2514 | Mul->op_begin()+MulOp); |
| 2515 | MulOps.append(Mul->op_begin()+MulOp+1, Mul->op_end()); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2516 | InnerMul = getMulExpr(MulOps, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2517 | } |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2518 | SmallVector<const SCEV *, 2> TwoOps = {getOne(Ty), InnerMul}; |
| 2519 | const SCEV *AddOne = getAddExpr(TwoOps, SCEV::FlagAnyWrap, Depth + 1); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2520 | const SCEV *OuterMul = getMulExpr(AddOne, MulOpSCEV, |
| 2521 | SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2522 | if (Ops.size() == 2) return OuterMul; |
| 2523 | if (AddOp < Idx) { |
| 2524 | Ops.erase(Ops.begin()+AddOp); |
| 2525 | Ops.erase(Ops.begin()+Idx-1); |
| 2526 | } else { |
| 2527 | Ops.erase(Ops.begin()+Idx); |
| 2528 | Ops.erase(Ops.begin()+AddOp-1); |
| 2529 | } |
| 2530 | Ops.push_back(OuterMul); |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2531 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2532 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 2533 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2534 | // Check this multiply against other multiplies being added together. |
| 2535 | for (unsigned OtherMulIdx = Idx+1; |
| 2536 | OtherMulIdx < Ops.size() && isa<SCEVMulExpr>(Ops[OtherMulIdx]); |
| 2537 | ++OtherMulIdx) { |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2538 | const SCEVMulExpr *OtherMul = cast<SCEVMulExpr>(Ops[OtherMulIdx]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2539 | // If MulOp occurs in OtherMul, we can fold the two multiplies |
| 2540 | // together. |
| 2541 | for (unsigned OMulOp = 0, e = OtherMul->getNumOperands(); |
| 2542 | OMulOp != e; ++OMulOp) |
| 2543 | if (OtherMul->getOperand(OMulOp) == MulOpSCEV) { |
| 2544 | // Fold X + (A*B*C) + (A*D*E) --> X + (A*(B*C+D*E)) |
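// For example, x + (2*y) + (3*y) folds to x + (5*y).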
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2545 | const SCEV *InnerMul1 = Mul->getOperand(MulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2546 | if (Mul->getNumOperands() != 2) { |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2547 | SmallVector<const SCEV *, 4> MulOps(Mul->op_begin(), |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2548 | Mul->op_begin()+MulOp); |
| 2549 | MulOps.append(Mul->op_begin()+MulOp+1, Mul->op_end()); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2550 | InnerMul1 = getMulExpr(MulOps, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2551 | } |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2552 | const SCEV *InnerMul2 = OtherMul->getOperand(OMulOp == 0); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2553 | if (OtherMul->getNumOperands() != 2) { |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 2554 | SmallVector<const SCEV *, 4> MulOps(OtherMul->op_begin(), |
Dan Gohman | 797a1db | 2010-08-16 16:57:24 +0000 | [diff] [blame] | 2555 | OtherMul->op_begin()+OMulOp); |
| 2556 | MulOps.append(OtherMul->op_begin()+OMulOp+1, OtherMul->op_end()); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2557 | InnerMul2 = getMulExpr(MulOps, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2558 | } |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2559 | SmallVector<const SCEV *, 2> TwoOps = {InnerMul1, InnerMul2}; |
| 2560 | const SCEV *InnerMulSum = |
| 2561 | getAddExpr(TwoOps, SCEV::FlagAnyWrap, Depth + 1); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2562 | const SCEV *OuterMul = getMulExpr(MulOpSCEV, InnerMulSum, |
| 2563 | SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2564 | if (Ops.size() == 2) return OuterMul; |
Dan Gohman | aabfc52 | 2010-08-31 22:50:31 +0000 | [diff] [blame] | 2565 | Ops.erase(Ops.begin()+Idx); |
| 2566 | Ops.erase(Ops.begin()+OtherMulIdx-1); |
| 2567 | Ops.push_back(OuterMul); |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2568 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2569 | } |
| 2570 | } |
| 2571 | } |
| 2572 | } |
| 2573 | |
| 2574 | // If there are any add recurrences in the operands list, see if any other |
| 2575 | // added values are loop invariant. If so, we can fold them into the |
| 2576 | // recurrence. |
| 2577 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddRecExpr) |
| 2578 | ++Idx; |
| 2579 | |
| 2580 | // Scan over all recurrences, trying to fold loop invariants into them. |
| 2581 | for (; Idx < Ops.size() && isa<SCEVAddRecExpr>(Ops[Idx]); ++Idx) { |
| 2582 | // Scan all of the other operands to this add and add them to the vector if |
| 2583 | // they are loop invariant w.r.t. the recurrence. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2584 | SmallVector<const SCEV *, 8> LIOps; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2585 | const SCEVAddRecExpr *AddRec = cast<SCEVAddRecExpr>(Ops[Idx]); |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 2586 | const Loop *AddRecLoop = AddRec->getLoop(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2587 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
Max Kazantsev | d8fe3eb | 2017-05-30 10:54:58 +0000 | [diff] [blame] | 2588 | if (isAvailableAtLoopEntry(Ops[i], AddRecLoop)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2589 | LIOps.push_back(Ops[i]); |
| 2590 | Ops.erase(Ops.begin()+i); |
| 2591 | --i; --e; |
| 2592 | } |
| 2593 | |
| 2594 | // If we found some loop invariants, fold them into the recurrence. |
| 2595 | if (!LIOps.empty()) { |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 2596 | // NLI + LI + {Start,+,Step} --> NLI + {LI+Start,+,Step} |
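// For example, if c is invariant in loop L, then c + {3,+,1}<L> becomes
// {c+3,+,1}<L>, while any non-invariant addends stay outside the recurrence.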
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2597 | LIOps.push_back(AddRec->getStart()); |
| 2598 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2599 | SmallVector<const SCEV *, 4> AddRecOps(AddRec->op_begin(), |
Dan Gohman | 7a2dab8 | 2009-12-18 03:57:04 +0000 | [diff] [blame] | 2600 | AddRec->op_end()); |
Oleg Ranevskyy | eb4ecca | 2016-05-25 13:01:33 +0000 | [diff] [blame] | 2601 | // This follows from the fact that the no-wrap flags on the outer add |
| 2602 | // expression are applicable on the 0th iteration, when the add recurrence |
| 2603 | // will be equal to its start value. |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2604 | AddRecOps[0] = getAddExpr(LIOps, Flags, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2605 | |
Dan Gohman | 1620613 | 2010-06-30 07:16:37 +0000 | [diff] [blame] | 2606 | // Build the new addrec. Propagate the NUW and NSW flags if both the |
Eric Christopher | 23bf3ba | 2011-01-11 09:02:09 +0000 | [diff] [blame] | 2607 | // outer add and the inner addrec are guaranteed to have no overflow. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 2608 | // Always propagate NW. |
| 2609 | Flags = AddRec->getNoWrapFlags(setFlags(Flags, SCEV::FlagNW)); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2610 | const SCEV *NewRec = getAddRecExpr(AddRecOps, AddRecLoop, Flags); |
Dan Gohman | 51f1305 | 2009-12-18 18:45:31 +0000 | [diff] [blame] | 2611 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2612 | // If all of the other operands were loop invariant, we are done. |
| 2613 | if (Ops.size() == 1) return NewRec; |
| 2614 | |
Nick Lewycky | db66b82 | 2011-09-06 05:08:09 +0000 | [diff] [blame] | 2615 | // Otherwise, add the folded AddRec by the non-invariant parts. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2616 | for (unsigned i = 0;; ++i) |
| 2617 | if (Ops[i] == AddRec) { |
| 2618 | Ops[i] = NewRec; |
| 2619 | break; |
| 2620 | } |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2621 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2622 | } |
| 2623 | |
| 2624 | // Okay, if there weren't any loop invariants to be folded, check to see if |
| 2625 | // there are multiple AddRecs with the same loop induction variable being
| 2626 | // added together. If so, we can fold them. |
| 2627 | for (unsigned OtherIdx = Idx+1; |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2628 | OtherIdx < Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 2629 | ++OtherIdx) { |
| 2630 | // We expect the AddRecExprs to be sorted in reverse dominance order,
| 2631 | // so that the 1st found AddRecExpr is dominated by all others. |
| 2632 | assert(DT.dominates( |
| 2633 | cast<SCEVAddRecExpr>(Ops[OtherIdx])->getLoop()->getHeader(), |
| 2634 | AddRec->getLoop()->getHeader()) && |
| 2635 | "AddRecExprs are not sorted in reverse dominance order?"); |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2636 | if (AddRecLoop == cast<SCEVAddRecExpr>(Ops[OtherIdx])->getLoop()) { |
| 2637 | // Other + {A,+,B}<L> + {C,+,D}<L> --> Other + {A+C,+,B+D}<L> |
| 2638 | SmallVector<const SCEV *, 4> AddRecOps(AddRec->op_begin(), |
| 2639 | AddRec->op_end()); |
| 2640 | for (; OtherIdx != Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
Max Kazantsev | b67d344 | 2017-05-17 03:58:42 +0000 | [diff] [blame] | 2641 | ++OtherIdx) { |
| 2642 | const auto *OtherAddRec = cast<SCEVAddRecExpr>(Ops[OtherIdx]); |
| 2643 | if (OtherAddRec->getLoop() == AddRecLoop) { |
| 2644 | for (unsigned i = 0, e = OtherAddRec->getNumOperands(); |
| 2645 | i != e; ++i) { |
| 2646 | if (i >= AddRecOps.size()) { |
| 2647 | AddRecOps.append(OtherAddRec->op_begin()+i, |
| 2648 | OtherAddRec->op_end()); |
| 2649 | break; |
Dan Gohman | c866bf4 | 2010-08-27 20:45:56 +0000 | [diff] [blame] | 2650 | } |
Max Kazantsev | b67d344 | 2017-05-17 03:58:42 +0000 | [diff] [blame] | 2651 | SmallVector<const SCEV *, 2> TwoOps = { |
| 2652 | AddRecOps[i], OtherAddRec->getOperand(i)}; |
| 2653 | AddRecOps[i] = getAddExpr(TwoOps, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2654 | } |
Max Kazantsev | b67d344 | 2017-05-17 03:58:42 +0000 | [diff] [blame] | 2655 | Ops.erase(Ops.begin() + OtherIdx); --OtherIdx; |
| 2656 | } |
| 2657 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2658 | // Step size has changed, so we cannot guarantee no self-wraparound. |
| 2659 | Ops[Idx] = getAddRecExpr(AddRecOps, AddRecLoop, SCEV::FlagAnyWrap); |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2660 | return getAddExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2661 | } |
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 2662 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2663 | |
| 2664 | // Otherwise couldn't fold anything into this recurrence. Move on to the
| 2665 | // next one. |
| 2666 | } |
| 2667 | |
| 2668 | // Okay, it looks like we really DO need an add expr. Check to see if we |
| 2669 | // already have one, otherwise create a new one. |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2670 | return getOrCreateAddExpr(Ops, Flags); |
| 2671 | } |
| 2672 | |
| 2673 | const SCEV * |
| 2674 | ScalarEvolution::getOrCreateAddExpr(SmallVectorImpl<const SCEV *> &Ops, |
| 2675 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2676 | FoldingSetNodeID ID; |
| 2677 | ID.AddInteger(scAddExpr); |
Javed Absar | da30c30 | 2017-11-16 13:49:27 +0000 | [diff] [blame] | 2678 | for (const SCEV *Op : Ops) |
| 2679 | ID.AddPointer(Op); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2680 | void *IP = nullptr; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2681 | SCEVAddExpr *S = |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2682 | static_cast<SCEVAddExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2683 | if (!S) { |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 2684 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 2685 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Daniil Fukalov | 6378bdb | 2017-02-06 12:38:06 +0000 | [diff] [blame] | 2686 | S = new (SCEVAllocator) |
| 2687 | SCEVAddExpr(ID.Intern(SCEVAllocator), O, Ops.size()); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2688 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 2689 | addToLoopUseLists(S); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2690 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2691 | S->setNoWrapFlags(Flags); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 2692 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2693 | } |
| 2694 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2695 | const SCEV * |
| 2696 | ScalarEvolution::getOrCreateMulExpr(SmallVectorImpl<const SCEV *> &Ops, |
| 2697 | SCEV::NoWrapFlags Flags) { |
| 2698 | FoldingSetNodeID ID; |
| 2699 | ID.AddInteger(scMulExpr); |
| 2700 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 2701 | ID.AddPointer(Ops[i]); |
| 2702 | void *IP = nullptr; |
| 2703 | SCEVMulExpr *S = |
| 2704 | static_cast<SCEVMulExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 2705 | if (!S) { |
| 2706 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 2707 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
| 2708 | S = new (SCEVAllocator) SCEVMulExpr(ID.Intern(SCEVAllocator), |
| 2709 | O, Ops.size()); |
| 2710 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 2711 | addToLoopUseLists(S); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2712 | } |
| 2713 | S->setNoWrapFlags(Flags); |
| 2714 | return S; |
| 2715 | } |
| 2716 | |
Nick Lewycky | 287682e | 2011-10-04 06:51:26 +0000 | [diff] [blame] | 2717 | static uint64_t umul_ov(uint64_t i, uint64_t j, bool &Overflow) { |
| 2718 | uint64_t k = i*j; |
| 2719 | if (j > 1 && k / j != i) Overflow = true; |
| 2720 | return k; |
| 2721 | } |
| 2722 | |
| 2723 | /// Compute the result of "n choose k", the binomial coefficient. If an |
| 2724 | /// intermediate computation overflows, Overflow will be set and the return will |
Benjamin Kramer | bde9176 | 2012-06-02 10:20:22 +0000 | [diff] [blame] | 2725 | /// be garbage. Overflow is never cleared, so callers must initialize it to false.
Nick Lewycky | 287682e | 2011-10-04 06:51:26 +0000 | [diff] [blame] | 2726 | static uint64_t Choose(uint64_t n, uint64_t k, bool &Overflow) { |
| 2727 | // We use the multiplicative formula: |
| 2728 | // n(n-1)(n-2)...(n-(k-1)) / k(k-1)(k-2)...1 . |
| 2729 | // At each iteration, we multiply the running result by the i-th term of the
| 2730 | // numerator and divide it by i. This division will always produce an
| 2731 | // integral result, and helps reduce the chance of overflow in the |
| 2732 | // intermediate computations. However, we can still overflow even when the |
| 2733 | // final result would fit. |
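//
// For example, Choose(5, 2) runs as follows:
//   i = 1: r = 1*5 = 5,  then r /= 1 -> 5
//   i = 2: r = 5*4 = 20, then r /= 2 -> 10
// yielding C(5, 2) = 10 without ever forming the full factorials.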
| 2734 | |
| 2735 | if (n == 0 || n == k) return 1; |
| 2736 | if (k > n) return 0; |
| 2737 | |
| 2738 | if (k > n/2) |
| 2739 | k = n-k; |
| 2740 | |
| 2741 | uint64_t r = 1; |
| 2742 | for (uint64_t i = 1; i <= k; ++i) { |
| 2743 | r = umul_ov(r, n-(i-1), Overflow); |
| 2744 | r /= i; |
| 2745 | } |
| 2746 | return r; |
| 2747 | } |
| 2748 | |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2749 | /// Determine if any of the operands in this SCEV are a constant or if |
| 2750 | /// any of the add or multiply expressions in this SCEV contain a constant. |
Max Kazantsev | fa49695 | 2017-07-28 06:42:15 +0000 | [diff] [blame] | 2751 | static bool containsConstantInAddMulChain(const SCEV *StartExpr) { |
| 2752 | struct FindConstantInAddMulChain { |
| 2753 | bool FoundConstant = false; |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2754 | |
Max Kazantsev | fa49695 | 2017-07-28 06:42:15 +0000 | [diff] [blame] | 2755 | bool follow(const SCEV *S) { |
| 2756 | FoundConstant |= isa<SCEVConstant>(S); |
| 2757 | return isa<SCEVAddExpr>(S) || isa<SCEVMulExpr>(S); |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2758 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 2759 | |
Max Kazantsev | fa49695 | 2017-07-28 06:42:15 +0000 | [diff] [blame] | 2760 | bool isDone() const { |
| 2761 | return FoundConstant; |
| 2762 | } |
| 2763 | }; |
| 2764 | |
| 2765 | FindConstantInAddMulChain F; |
| 2766 | SCEVTraversal<FindConstantInAddMulChain> ST(F); |
| 2767 | ST.visitAll(StartExpr); |
| 2768 | return F.FoundConstant; |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2769 | } |
| 2770 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 2771 | /// Get a canonical multiply expression, or something simpler if possible. |
Dan Gohman | 816fe0a | 2009-10-09 00:10:36 +0000 | [diff] [blame] | 2772 | const SCEV *ScalarEvolution::getMulExpr(SmallVectorImpl<const SCEV *> &Ops, |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2773 | SCEV::NoWrapFlags Flags, |
| 2774 | unsigned Depth) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2775 | assert(Flags == maskFlags(Flags, SCEV::FlagNUW | SCEV::FlagNSW) && |
| 2776 | "only nuw or nsw allowed"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2777 | assert(!Ops.empty() && "Cannot get empty mul!"); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2778 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2779 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2780 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2781 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 2782 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 2783 | "SCEVMulExpr operand types don't match!"); |
| 2784 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2785 | |
| 2786 | // Sort by complexity, this groups all similar expression types together. |
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 2787 | GroupByComplexity(Ops, &LI, DT); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2788 | |
Sanjoy Das | 6489561 | 2015-10-09 02:44:45 +0000 | [diff] [blame] | 2789 | Flags = StrengthenNoWrapFlags(this, scMulExpr, Ops, Flags); |
| 2790 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2791 | // Limit the recursion depth.
| 2792 | if (Depth > MaxArithDepth) |
| 2793 | return getOrCreateMulExpr(Ops, Flags); |
| 2794 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2795 | // If there are any constants, fold them together. |
| 2796 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2797 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2798 | |
| 2799 | // C1*(C2+V) -> C1*C2 + C1*V |
| 2800 | if (Ops.size() == 2) |
Nick Lewycky | 05044c2 | 2014-12-06 00:45:50 +0000 | [diff] [blame] | 2801 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[1])) |
| 2802 | // If any of Add's ops are Adds or Muls with a constant, |
| 2803 | // apply this transformation as well. |
| 2804 | if (Add->getNumOperands() == 2) |
Max Kazantsev | fa49695 | 2017-07-28 06:42:15 +0000 | [diff] [blame] | 2805 | // TODO: There are some cases where this transformation is not |
| 2806 | // profitable, for example: |
| 2807 | // Add = (C0 + X) * Y + Z. |
| 2808 | // Maybe the scope of this transformation should be narrowed down. |
| 2809 | if (containsConstantInAddMulChain(Add)) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2810 | return getAddExpr(getMulExpr(LHSC, Add->getOperand(0), |
| 2811 | SCEV::FlagAnyWrap, Depth + 1), |
| 2812 | getMulExpr(LHSC, Add->getOperand(1), |
| 2813 | SCEV::FlagAnyWrap, Depth + 1), |
| 2814 | SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2815 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2816 | ++Idx; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2817 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2818 | // We found two constants, fold them together! |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 2819 | ConstantInt *Fold = |
| 2820 | ConstantInt::get(getContext(), LHSC->getAPInt() * RHSC->getAPInt()); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 2821 | Ops[0] = getConstant(Fold); |
| 2822 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 2823 | if (Ops.size() == 1) return Ops[0]; |
| 2824 | LHSC = cast<SCEVConstant>(Ops[0]); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2825 | } |
| 2826 | |
| 2827 | // If we are left with a constant one being multiplied, strip it off. |
Craig Topper | ca2c876 | 2017-07-06 18:39:49 +0000 | [diff] [blame] | 2828 | if (cast<SCEVConstant>(Ops[0])->getValue()->isOne()) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2829 | Ops.erase(Ops.begin()); |
| 2830 | --Idx; |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 2831 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isZero()) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2832 | // If we have a multiply of zero, it will always be zero. |
| 2833 | return Ops[0]; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2834 | } else if (Ops[0]->isAllOnesValue()) { |
| 2835 | // If we have a mul by -1 of an add, try distributing the -1 among the |
| 2836 | // add operands. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2837 | if (Ops.size() == 2) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2838 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Ops[1])) { |
| 2839 | SmallVector<const SCEV *, 4> NewOps; |
| 2840 | bool AnyFolded = false; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 2841 | for (const SCEV *AddOp : Add->operands()) { |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2842 | const SCEV *Mul = getMulExpr(Ops[0], AddOp, SCEV::FlagAnyWrap, |
| 2843 | Depth + 1); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 2844 | if (!isa<SCEVMulExpr>(Mul)) AnyFolded = true; |
| 2845 | NewOps.push_back(Mul); |
| 2846 | } |
| 2847 | if (AnyFolded) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2848 | return getAddExpr(NewOps, SCEV::FlagAnyWrap, Depth + 1); |
Sanjoy Das | 6391459 | 2015-10-18 00:29:20 +0000 | [diff] [blame] | 2849 | } else if (const auto *AddRec = dyn_cast<SCEVAddRecExpr>(Ops[1])) { |
Andrew Trick | e92dcce | 2011-03-14 17:38:54 +0000 | [diff] [blame] | 2850 | // Negation preserves a recurrence's no self-wrap property. |
| 2851 | SmallVector<const SCEV *, 4> Operands; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 2852 | for (const SCEV *AddRecOp : AddRec->operands()) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2853 | Operands.push_back(getMulExpr(Ops[0], AddRecOp, SCEV::FlagAnyWrap, |
| 2854 | Depth + 1)); |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 2855 | |
Andrew Trick | e92dcce | 2011-03-14 17:38:54 +0000 | [diff] [blame] | 2856 | return getAddRecExpr(Operands, AddRec->getLoop(), |
| 2857 | AddRec->getNoWrapFlags(SCEV::FlagNW)); |
| 2858 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2859 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2860 | } |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 2861 | |
| 2862 | if (Ops.size() == 1) |
| 2863 | return Ops[0]; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2864 | } |
| 2865 | |
| 2866 | // Skip over the add expression until we get to a multiply. |
| 2867 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scMulExpr) |
| 2868 | ++Idx; |
| 2869 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2870 | // If there are mul operands inline them all into this expression. |
| 2871 | if (Idx < Ops.size()) { |
| 2872 | bool DeletedMul = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 2873 | while (const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Ops[Idx])) { |
Li Huang | fcfe8cd | 2016-10-20 21:38:39 +0000 | [diff] [blame] | 2874 | if (Ops.size() > MulOpsInlineThreshold) |
| 2875 | break; |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2876 | // If we have a mul, expand the mul operands onto the end of the
| 2877 | // operands list. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2878 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 2879 | Ops.append(Mul->op_begin(), Mul->op_end()); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2880 | DeletedMul = true; |
| 2881 | } |
| 2882 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2883 | // If we deleted at least one mul, we added operands to the end of the |
| 2884 | // list, and they are not necessarily sorted. Recurse to re-sort and
| 2885 | // re-simplify any operands we just acquired.
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2886 | if (DeletedMul) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2887 | return getMulExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2888 | } |
| 2889 | |
| 2890 | // If there are any add recurrences in the operands list, see if any other |
| 2891 | // multiplied values are loop invariant. If so, we can fold them into the
| 2892 | // recurrence. |
| 2893 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scAddRecExpr) |
| 2894 | ++Idx; |
| 2895 | |
| 2896 | // Scan over all recurrences, trying to fold loop invariants into them. |
| 2897 | for (; Idx < Ops.size() && isa<SCEVAddRecExpr>(Ops[Idx]); ++Idx) { |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2898 | // Scan all of the other operands to this mul and add them to the vector |
| 2899 | // if they are loop invariant w.r.t. the recurrence. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2900 | SmallVector<const SCEV *, 8> LIOps; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 2901 | const SCEVAddRecExpr *AddRec = cast<SCEVAddRecExpr>(Ops[Idx]); |
Dan Gohman | 0f2de01 | 2010-08-29 14:55:19 +0000 | [diff] [blame] | 2902 | const Loop *AddRecLoop = AddRec->getLoop(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2903 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
Max Kazantsev | d8fe3eb | 2017-05-30 10:54:58 +0000 | [diff] [blame] | 2904 | if (isAvailableAtLoopEntry(Ops[i], AddRecLoop)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2905 | LIOps.push_back(Ops[i]); |
| 2906 | Ops.erase(Ops.begin()+i); |
| 2907 | --i; --e; |
| 2908 | } |
| 2909 | |
| 2910 | // If we found some loop invariants, fold them into the recurrence. |
| 2911 | if (!LIOps.empty()) { |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 2912 | // NLI * LI * {Start,+,Step} --> NLI * {LI*Start,+,LI*Step} |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 2913 | SmallVector<const SCEV *, 4> NewOps; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2914 | NewOps.reserve(AddRec->getNumOperands()); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2915 | const SCEV *Scale = getMulExpr(LIOps, SCEV::FlagAnyWrap, Depth + 1); |
Dan Gohman | 8f5954f | 2010-06-17 23:34:09 +0000 | [diff] [blame] | 2916 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2917 | NewOps.push_back(getMulExpr(Scale, AddRec->getOperand(i), |
| 2918 | SCEV::FlagAnyWrap, Depth + 1)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2919 | |
Dan Gohman | 1620613 | 2010-06-30 07:16:37 +0000 | [diff] [blame] | 2920 | // Build the new addrec. Propagate the NUW and NSW flags if both the |
| 2921 | // outer mul and the inner addrec are guaranteed to have no overflow. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2922 | // |
| 2923 | // No self-wrap cannot be guaranteed after changing the step size, but |
Chris Lattner | 0ab5e2c | 2011-04-15 05:18:47 +0000 | [diff] [blame] | 2924 | // will be inferred if either NUW or NSW is true. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 2925 | Flags = AddRec->getNoWrapFlags(clearFlags(Flags, SCEV::FlagNW)); |
| 2926 | const SCEV *NewRec = getAddRecExpr(NewOps, AddRecLoop, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2927 | |
| 2928 | // If all of the other operands were loop invariant, we are done. |
| 2929 | if (Ops.size() == 1) return NewRec; |
| 2930 | |
Nick Lewycky | db66b82 | 2011-09-06 05:08:09 +0000 | [diff] [blame] | 2931 | // Otherwise, multiply the folded AddRec by the non-invariant parts. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2932 | for (unsigned i = 0;; ++i) |
| 2933 | if (Ops[i] == AddRec) { |
| 2934 | Ops[i] = NewRec; |
| 2935 | break; |
| 2936 | } |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2937 | return getMulExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2938 | } |
| 2939 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2940 | // Okay, if there weren't any loop invariants to be folded, check to see |
| 2941 | // if there are multiple AddRecs with the same loop induction variable
| 2942 | // being multiplied together. If so, we can fold them. |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2943 | |
| 2944 | // {A1,+,A2,+,...,+,An}<L> * {B1,+,B2,+,...,+,Bn}<L> |
| 2945 | // = {x=1 in [ sum y=x..2x [ sum z=max(y-x, y-n)..min(x,n) [ |
| 2946 | // choose(x, 2x-y)*choose(2x-y, x-z)*A_{y-z}*B_z
| 2947 | // ]]],+,...up to x=2n}. |
| 2948 | // Note that the arguments to choose() are always integers with values |
| 2949 | // known at compile time, never SCEV objects. |
| 2950 | // |
| 2951 | // The implementation avoids pointless extra computations when the two |
| 2952 | // addrecs are of different length (mathematically, it's equivalent to
| 2953 | // an infinite stream of zeros on the right). |
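// As a sanity check, for two affine recurrences this reduces to
//   {a,+,b}<L> * {c,+,d}<L> = {a*c,+,a*d+b*c+b*d,+,2*b*d}<L>,
// i.e. the quadratic a*c + (a*d+b*c)*i + b*d*i^2 rewritten in the binomial
// basis used by add recurrences.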
| 2954 | bool OpsModified = false; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2955 | for (unsigned OtherIdx = Idx+1; |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2956 | OtherIdx != Ops.size() && isa<SCEVAddRecExpr>(Ops[OtherIdx]); |
Nick Lewycky | e0aa54b | 2011-09-06 21:42:18 +0000 | [diff] [blame] | 2957 | ++OtherIdx) { |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2958 | const SCEVAddRecExpr *OtherAddRec = |
| 2959 | dyn_cast<SCEVAddRecExpr>(Ops[OtherIdx]); |
| 2960 | if (!OtherAddRec || OtherAddRec->getLoop() != AddRecLoop) |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2961 | continue; |
| 2962 | |
Max Kazantsev | 0e9e079 | 2017-07-23 15:40:19 +0000 | [diff] [blame] | 2963 | // Limit the maximum number of operands to avoid creating unreasonably big
| 2964 | // SCEVAddRecs with very complex operands. |
| 2965 | if (AddRec->getNumOperands() + OtherAddRec->getNumOperands() - 1 > |
| 2966 | MaxAddRecSize) |
| 2967 | continue; |
| 2968 | |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2969 | bool Overflow = false; |
| 2970 | Type *Ty = AddRec->getType(); |
| 2971 | bool LargerThan64Bits = getTypeSizeInBits(Ty) > 64; |
| 2972 | SmallVector<const SCEV*, 7> AddRecOps; |
| 2973 | for (int x = 0, xe = AddRec->getNumOperands() + |
| 2974 | OtherAddRec->getNumOperands() - 1; x != xe && !Overflow; ++x) { |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 2975 | const SCEV *Term = getZero(Ty); |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2976 | for (int y = x, ye = 2*x+1; y != ye && !Overflow; ++y) { |
| 2977 | uint64_t Coeff1 = Choose(x, 2*x - y, Overflow); |
| 2978 | for (int z = std::max(y-x, y-(int)AddRec->getNumOperands()+1), |
| 2979 | ze = std::min(x+1, (int)OtherAddRec->getNumOperands()); |
| 2980 | z < ze && !Overflow; ++z) { |
| 2981 | uint64_t Coeff2 = Choose(2*x - y, x-z, Overflow); |
| 2982 | uint64_t Coeff; |
| 2983 | if (LargerThan64Bits) |
| 2984 | Coeff = umul_ov(Coeff1, Coeff2, Overflow); |
| 2985 | else |
| 2986 | Coeff = Coeff1*Coeff2; |
| 2987 | const SCEV *CoeffTerm = getConstant(Ty, Coeff); |
| 2988 | const SCEV *Term1 = AddRec->getOperand(y-z); |
| 2989 | const SCEV *Term2 = OtherAddRec->getOperand(z); |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 2990 | Term = getAddExpr(Term, getMulExpr(CoeffTerm, Term1, Term2, |
| 2991 | SCEV::FlagAnyWrap, Depth + 1), |
| 2992 | SCEV::FlagAnyWrap, Depth + 1); |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2993 | } |
Andrew Trick | 946f76b | 2012-05-30 03:35:17 +0000 | [diff] [blame] | 2994 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2995 | AddRecOps.push_back(Term); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 2996 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 2997 | if (!Overflow) { |
| 2998 | const SCEV *NewAddRec = getAddRecExpr(AddRecOps, AddRec->getLoop(), |
| 2999 | SCEV::FlagAnyWrap); |
| 3000 | if (Ops.size() == 2) return NewAddRec; |
| 3001 | Ops[Idx] = NewAddRec; |
| 3002 | Ops.erase(Ops.begin() + OtherIdx); --OtherIdx; |
| 3003 | OpsModified = true; |
| 3004 | AddRec = dyn_cast<SCEVAddRecExpr>(NewAddRec); |
| 3005 | if (!AddRec) |
| 3006 | break; |
| 3007 | } |
Nick Lewycky | e0aa54b | 2011-09-06 21:42:18 +0000 | [diff] [blame] | 3008 | } |
Nick Lewycky | 9775640 | 2014-09-01 05:17:15 +0000 | [diff] [blame] | 3009 | if (OpsModified) |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 3010 | return getMulExpr(Ops, SCEV::FlagAnyWrap, Depth + 1); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3011 | |
| 3012 | // Otherwise couldn't fold anything into this recurrence. Move on to the
| 3013 | // next one. |
| 3014 | } |
| 3015 | |
| 3016 | // Okay, it looks like we really DO need a mul expr. Check to see if we
| 3017 | // already have one, otherwise create a new one. |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 3018 | return getOrCreateMulExpr(Ops, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3019 | } |
| 3020 | |
Alexandre Isoard | 405728f | 2017-09-01 14:59:59 +0000 | [diff] [blame] | 3021 | /// Build an unsigned remainder expression in terms of unsigned division.
| 3022 | const SCEV *ScalarEvolution::getURemExpr(const SCEV *LHS, |
| 3023 | const SCEV *RHS) { |
| 3024 | assert(getEffectiveSCEVType(LHS->getType()) == |
| 3025 | getEffectiveSCEVType(RHS->getType()) && |
| 3026 | "SCEVURemExpr operand types don't match!"); |
| 3027 | |
| 3028 | // Short-circuit easy cases |
| 3029 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) { |
| 3030 | // If constant is one, the result is trivial |
| 3031 | if (RHSC->getValue()->isOne()) |
| 3032 | return getZero(LHS->getType()); // X urem 1 --> 0 |
| 3033 | |
| 3034 | // If constant is a power of two, fold into a zext(trunc(LHS)). |
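// For example, for an i32 %x, %x urem 8 becomes zext(trunc %x to i3) to i32,
// i.e. the low three bits of %x.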
| 3035 | if (RHSC->getAPInt().isPowerOf2()) { |
| 3036 | Type *FullTy = LHS->getType(); |
| 3037 | Type *TruncTy = |
| 3038 | IntegerType::get(getContext(), RHSC->getAPInt().logBase2()); |
| 3039 | return getZeroExtendExpr(getTruncateExpr(LHS, TruncTy), FullTy); |
| 3040 | } |
| 3041 | } |
| 3042 | |
| 3043 | // Fall back to the general identity: %x urem %y == %x -<nuw> ((%x udiv %y) *<nuw> %y)
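// For example, 13 urem 5 == 13 - (13 udiv 5)*5 == 13 - 10 == 3.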
| 3044 | const SCEV *UDiv = getUDivExpr(LHS, RHS); |
| 3045 | const SCEV *Mult = getMulExpr(UDiv, RHS, SCEV::FlagNUW); |
| 3046 | return getMinusSCEV(LHS, Mult, SCEV::FlagNUW); |
| 3047 | } |
| 3048 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3049 | /// Get a canonical unsigned division expression, or something simpler if |
| 3050 | /// possible. |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3051 | const SCEV *ScalarEvolution::getUDivExpr(const SCEV *LHS, |
| 3052 | const SCEV *RHS) { |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3053 | assert(getEffectiveSCEVType(LHS->getType()) == |
| 3054 | getEffectiveSCEVType(RHS->getType()) && |
| 3055 | "SCEVUDivExpr operand types don't match!"); |
| 3056 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3057 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) { |
Craig Topper | ca2c876 | 2017-07-06 18:39:49 +0000 | [diff] [blame] | 3058 | if (RHSC->getValue()->isOne()) |
Dan Gohman | 8a8ad7d | 2009-08-20 16:42:55 +0000 | [diff] [blame] | 3059 | return LHS; // X udiv 1 --> x |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3060 | // If the denominator is zero, the result of the udiv is undefined. Don't |
| 3061 | // try to analyze it, because the resolution chosen here may differ from |
| 3062 | // the resolution chosen in other parts of the compiler. |
| 3063 | if (!RHSC->getValue()->isZero()) { |
| 3064 | // Determine if the division can be folded into the operands of
| 3065 | // the dividend (LHS).
| 3066 | // TODO: Generalize this to non-constants by using known-bits information. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3067 | Type *Ty = LHS->getType(); |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3068 | unsigned LZ = RHSC->getAPInt().countLeadingZeros(); |
Dan Gohman | db764c6 | 2010-08-04 19:52:50 +0000 | [diff] [blame] | 3069 | unsigned MaxShiftAmt = getTypeSizeInBits(Ty) - LZ - 1; |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3070 | // For non-power-of-two values, effectively round the value up to the |
| 3071 | // nearest power of two. |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3072 | if (!RHSC->getAPInt().isPowerOf2()) |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3073 | ++MaxShiftAmt; |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3074 | IntegerType *ExtTy = |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3075 | IntegerType::get(getContext(), getTypeSizeInBits(Ty) + MaxShiftAmt); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3076 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(LHS)) |
| 3077 | if (const SCEVConstant *Step = |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 3078 | dyn_cast<SCEVConstant>(AR->getStepRecurrence(*this))) { |
| 3079 | // {X,+,N}/C --> {X/C,+,N/C} if safe and N/C can be folded. |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3080 | const APInt &StepInt = Step->getAPInt(); |
| 3081 | const APInt &DivInt = RHSC->getAPInt(); |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 3082 | if (!StepInt.urem(DivInt) && |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3083 | getZeroExtendExpr(AR, ExtTy) == |
| 3084 | getAddRecExpr(getZeroExtendExpr(AR->getStart(), ExtTy), |
| 3085 | getZeroExtendExpr(Step, ExtTy), |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3086 | AR->getLoop(), SCEV::FlagAnyWrap)) { |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3087 | SmallVector<const SCEV *, 4> Operands; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 3088 | for (const SCEV *Op : AR->operands()) |
| 3089 | Operands.push_back(getUDivExpr(Op, RHS)); |
| 3090 | return getAddRecExpr(Operands, AR->getLoop(), SCEV::FlagNW); |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 3091 | } |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 3092 | // Get a canonical UDivExpr for a recurrence.
| 3093 | // {X,+,N}/C => {Y,+,N}/C where Y=X-(X%N). Safe when C%N=0.
| 3094 | // We can currently only fold X%N if X is constant. |
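// For example, with N=2 and C=4, {7,+,2}/4 can be rewritten as {6,+,2}/4:
// the sequences 7,9,11,13,... and 6,8,10,12,... agree under udiv by 4.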
| 3095 | const SCEVConstant *StartC = dyn_cast<SCEVConstant>(AR->getStart()); |
| 3096 | if (StartC && !DivInt.urem(StepInt) && |
| 3097 | getZeroExtendExpr(AR, ExtTy) == |
| 3098 | getAddRecExpr(getZeroExtendExpr(AR->getStart(), ExtTy), |
| 3099 | getZeroExtendExpr(Step, ExtTy), |
| 3100 | AR->getLoop(), SCEV::FlagAnyWrap)) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3101 | const APInt &StartInt = StartC->getAPInt(); |
Andrew Trick | 6d45a01 | 2011-08-06 07:00:37 +0000 | [diff] [blame] | 3102 | const APInt &StartRem = StartInt.urem(StepInt); |
| 3103 | if (StartRem != 0) |
| 3104 | LHS = getAddRecExpr(getConstant(StartInt - StartRem), Step, |
| 3105 | AR->getLoop(), SCEV::FlagNW); |
| 3106 | } |
| 3107 | } |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3108 | // (A*B)/C --> A*(B/C) if safe and B/C can be folded. |
| 3109 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(LHS)) { |
| 3110 | SmallVector<const SCEV *, 4> Operands; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 3111 | for (const SCEV *Op : M->operands()) |
| 3112 | Operands.push_back(getZeroExtendExpr(Op, ExtTy)); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3113 | if (getZeroExtendExpr(M, ExtTy) == getMulExpr(Operands)) |
| 3114 | // Find an operand that's safely divisible. |
| 3115 | for (unsigned i = 0, e = M->getNumOperands(); i != e; ++i) { |
| 3116 | const SCEV *Op = M->getOperand(i); |
| 3117 | const SCEV *Div = getUDivExpr(Op, RHSC); |
| 3118 | if (!isa<SCEVUDivExpr>(Div) && getMulExpr(Div, RHSC) == Op) { |
| 3119 | Operands = SmallVector<const SCEV *, 4>(M->op_begin(), |
| 3120 | M->op_end()); |
| 3121 | Operands[i] = Div; |
| 3122 | return getMulExpr(Operands); |
| 3123 | } |
| 3124 | } |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 3125 | } |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3126 | // (A+B)/C --> (A/C + B/C) if safe and A/C and B/C can be folded. |
Andrew Trick | 7d1eea8 | 2011-04-27 18:17:36 +0000 | [diff] [blame] | 3127 | if (const SCEVAddExpr *A = dyn_cast<SCEVAddExpr>(LHS)) { |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3128 | SmallVector<const SCEV *, 4> Operands; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 3129 | for (const SCEV *Op : A->operands()) |
| 3130 | Operands.push_back(getZeroExtendExpr(Op, ExtTy)); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3131 | if (getZeroExtendExpr(A, ExtTy) == getAddExpr(Operands)) { |
| 3132 | Operands.clear(); |
| 3133 | for (unsigned i = 0, e = A->getNumOperands(); i != e; ++i) { |
| 3134 | const SCEV *Op = getUDivExpr(A->getOperand(i), RHS); |
| 3135 | if (isa<SCEVUDivExpr>(Op) || |
| 3136 | getMulExpr(Op, RHS) != A->getOperand(i)) |
| 3137 | break; |
| 3138 | Operands.push_back(Op); |
| 3139 | } |
| 3140 | if (Operands.size() == A->getNumOperands()) |
| 3141 | return getAddExpr(Operands); |
| 3142 | } |
| 3143 | } |
Dan Gohman | c3a3cb4 | 2009-05-08 20:18:49 +0000 | [diff] [blame] | 3144 | |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 3145 | // Fold if both operands are constant. |
| 3146 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(LHS)) { |
| 3147 | Constant *LHSCV = LHSC->getValue(); |
| 3148 | Constant *RHSCV = RHSC->getValue(); |
| 3149 | return getConstant(cast<ConstantInt>(ConstantExpr::getUDiv(LHSCV, |
| 3150 | RHSCV))); |
| 3151 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3152 | } |
| 3153 | } |
| 3154 | |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3155 | FoldingSetNodeID ID; |
| 3156 | ID.AddInteger(scUDivExpr); |
| 3157 | ID.AddPointer(LHS); |
| 3158 | ID.AddPointer(RHS); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3159 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3160 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3161 | SCEV *S = new (SCEVAllocator) SCEVUDivExpr(ID.Intern(SCEVAllocator), |
| 3162 | LHS, RHS); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3163 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 3164 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3165 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3166 | } |
| 3167 | |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3168 | static const APInt gcd(const SCEVConstant *C1, const SCEVConstant *C2) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3169 | APInt A = C1->getAPInt().abs(); |
| 3170 | APInt B = C2->getAPInt().abs(); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3171 | uint32_t ABW = A.getBitWidth(); |
| 3172 | uint32_t BBW = B.getBitWidth(); |
| 3173 | |
| 3174 | if (ABW > BBW) |
| 3175 | B = B.zext(ABW); |
| 3176 | else if (ABW < BBW) |
| 3177 | A = A.zext(BBW); |
| 3178 | |
Craig Topper | 69f1af2 | 2017-05-06 05:22:56 +0000 | [diff] [blame] | 3179 | return APIntOps::GreatestCommonDivisor(std::move(A), std::move(B)); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3180 | } |
| 3181 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3182 | /// Get a canonical unsigned division expression, or something simpler if |
| 3183 | /// possible. There is no representation for an exact udiv in SCEV IR, but we |
| 3184 | /// can attempt to remove factors from the LHS and RHS. We can't do this when |
| 3185 | /// it's not exact because the udiv may be clearing bits. |
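/// For example, assuming the multiply does not wrap, an exact (4*%x) /u 4
/// simplifies to %x, and an exact (6*%x) /u 3 simplifies to 2*%x.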
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3186 | const SCEV *ScalarEvolution::getUDivExactExpr(const SCEV *LHS, |
| 3187 | const SCEV *RHS) { |
| 3188 | // TODO: we could try to find factors in all sorts of things, but for now we |
| 3189 | // just deal with u/exact (multiply, constant). See SCEVDivision towards the |
| 3190 | // end of this file for inspiration. |
| 3191 | |
| 3192 | const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(LHS); |
Eli Friedman | f1f49c8 | 2017-01-18 23:56:42 +0000 | [diff] [blame] | 3193 | if (!Mul || !Mul->hasNoUnsignedWrap()) |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3194 | return getUDivExpr(LHS, RHS); |
| 3195 | |
| 3196 | if (const SCEVConstant *RHSCst = dyn_cast<SCEVConstant>(RHS)) { |
| 3197 | // If the mulexpr multiplies by a constant, then that constant must be the |
| 3198 | // first element of the mulexpr. |
Sanjoy Das | 6391459 | 2015-10-18 00:29:20 +0000 | [diff] [blame] | 3199 | if (const auto *LHSCst = dyn_cast<SCEVConstant>(Mul->getOperand(0))) { |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3200 | if (LHSCst == RHSCst) { |
| 3201 | SmallVector<const SCEV *, 2> Operands; |
| 3202 | Operands.append(Mul->op_begin() + 1, Mul->op_end()); |
| 3203 | return getMulExpr(Operands); |
| 3204 | } |
| 3205 | |
| 3206 | // We can't just assume that LHSCst divides RHSCst cleanly; it could be
| 3207 | // that there's a factor provided by one of the other terms. We need to |
| 3208 | // check. |
| 3209 | APInt Factor = gcd(LHSCst, RHSCst); |
| 3210 | if (!Factor.isIntN(1)) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3211 | LHSCst = |
| 3212 | cast<SCEVConstant>(getConstant(LHSCst->getAPInt().udiv(Factor))); |
| 3213 | RHSCst = |
| 3214 | cast<SCEVConstant>(getConstant(RHSCst->getAPInt().udiv(Factor))); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3215 | SmallVector<const SCEV *, 2> Operands; |
| 3216 | Operands.push_back(LHSCst); |
| 3217 | Operands.append(Mul->op_begin() + 1, Mul->op_end()); |
| 3218 | LHS = getMulExpr(Operands); |
| 3219 | RHS = RHSCst; |
Nick Lewycky | 629199c | 2014-01-27 10:47:44 +0000 | [diff] [blame] | 3220 | Mul = dyn_cast<SCEVMulExpr>(LHS); |
| 3221 | if (!Mul) |
| 3222 | return getUDivExactExpr(LHS, RHS); |
Nick Lewycky | 31eaca5 | 2014-01-27 10:04:03 +0000 | [diff] [blame] | 3223 | } |
| 3224 | } |
| 3225 | } |
| 3226 | |
| 3227 | for (int i = 0, e = Mul->getNumOperands(); i != e; ++i) { |
| 3228 | if (Mul->getOperand(i) == RHS) { |
| 3229 | SmallVector<const SCEV *, 2> Operands; |
| 3230 | Operands.append(Mul->op_begin(), Mul->op_begin() + i); |
| 3231 | Operands.append(Mul->op_begin() + i + 1, Mul->op_end()); |
| 3232 | return getMulExpr(Operands); |
| 3233 | } |
| 3234 | } |
| 3235 | |
| 3236 | return getUDivExpr(LHS, RHS); |
| 3237 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3238 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3239 | /// Get an add recurrence expression for the specified loop. Simplify the |
| 3240 | /// expression as much as possible. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3241 | const SCEV *ScalarEvolution::getAddRecExpr(const SCEV *Start, const SCEV *Step, |
| 3242 | const Loop *L, |
| 3243 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3244 | SmallVector<const SCEV *, 4> Operands; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3245 | Operands.push_back(Start); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3246 | if (const SCEVAddRecExpr *StepChrec = dyn_cast<SCEVAddRecExpr>(Step)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3247 | if (StepChrec->getLoop() == L) { |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 3248 | Operands.append(StepChrec->op_begin(), StepChrec->op_end()); |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 3249 | return getAddRecExpr(Operands, L, maskFlags(Flags, SCEV::FlagNW)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3250 | } |
| 3251 | |
| 3252 | Operands.push_back(Step); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3253 | return getAddRecExpr(Operands, L, Flags); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3254 | } |
| 3255 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3256 | /// Get an add recurrence expression for the specified loop. Simplify the |
| 3257 | /// expression as much as possible. |
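/// The operand list {A,+,B,+,C,...}<L> denotes the chain of recurrences
/// A + B*I + C*I*(I-1)/2 + ... evaluated at iteration I of loop L.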
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 3258 | const SCEV * |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3259 | ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3260 | const Loop *L, SCEV::NoWrapFlags Flags) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3261 | if (Operands.size() == 1) return Operands[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3262 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3263 | Type *ETy = getEffectiveSCEVType(Operands[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3264 | for (unsigned i = 1, e = Operands.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 3265 | assert(getEffectiveSCEVType(Operands[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3266 | "SCEVAddRecExpr operand types don't match!"); |
Dan Gohman | d3a32ae | 2010-11-17 20:48:38 +0000 | [diff] [blame] | 3267 | for (unsigned i = 0, e = Operands.size(); i != e; ++i) |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 3268 | assert(isLoopInvariant(Operands[i], L) && |
Dan Gohman | d3a32ae | 2010-11-17 20:48:38 +0000 | [diff] [blame] | 3269 | "SCEVAddRecExpr operand is not loop-invariant!"); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3270 | #endif |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3271 | |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 3272 | if (Operands.back()->isZero()) { |
| 3273 | Operands.pop_back(); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3274 | return getAddRecExpr(Operands, L, SCEV::FlagAnyWrap); // {X,+,0} --> X |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 3275 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3276 | |
Dan Gohman | cf9c64e | 2010-02-19 18:49:22 +0000 | [diff] [blame] | 3277 | // It's tempting to call getMaxBackedgeTakenCount here and
| 3278 | // use that information to infer NUW and NSW flags. However, computing a |
| 3279 | // BE count requires calling getAddRecExpr, so we may not yet have a |
| 3280 | // meaningful BE count at this point (and if we don't, we'd be stuck |
| 3281 | // with a SCEVCouldNotCompute as the cached BE count). |
| 3282 | |
Sanjoy Das | 81401d4 | 2015-01-10 23:41:24 +0000 | [diff] [blame] | 3283 | Flags = StrengthenNoWrapFlags(this, scAddRecExpr, Operands, Flags); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3284 | |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 3285 | // Canonicalize nested AddRecs by nesting them in order of loop depth.
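// For example, {{A,+,B}<Inner>,+,C}<Outer> is canonicalized to
// {{A,+,C}<Outer>,+,B}<Inner>, provided the operands remain invariant in
// their respective loops.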
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3286 | if (const SCEVAddRecExpr *NestedAR = dyn_cast<SCEVAddRecExpr>(Operands[0])) { |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 3287 | const Loop *NestedLoop = NestedAR->getLoop(); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3288 | if (L->contains(NestedLoop) |
| 3289 | ? (L->getLoopDepth() < NestedLoop->getLoopDepth()) |
| 3290 | : (!NestedLoop->contains(L) && |
| 3291 | DT.dominates(L->getHeader(), NestedLoop->getHeader()))) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3292 | SmallVector<const SCEV *, 4> NestedOperands(NestedAR->op_begin(), |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 3293 | NestedAR->op_end()); |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 3294 | Operands[0] = NestedAR->getStart(); |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 3295 | // AddRecs require their operands be loop-invariant with respect to their |
| 3296 | // loops. Don't perform this transformation if it would break this |
| 3297 | // requirement. |
Sanjoy Das | 3b827c7 | 2015-11-29 23:40:53 +0000 | [diff] [blame] | 3298 | bool AllInvariant = all_of( |
| 3299 | Operands, [&](const SCEV *Op) { return isLoopInvariant(Op, L); }); |
Sanjoy Das | f07d2a7 | 2015-10-18 00:29:23 +0000 | [diff] [blame] | 3300 | |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 3301 | if (AllInvariant) { |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3302 | // Create a recurrence for the outer loop with the same step size. |
| 3303 | // |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3304 | // The outer recurrence keeps its NW flag but only keeps NUW/NSW if the |
| 3305 | // inner recurrence has the same property. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 3306 | SCEV::NoWrapFlags OuterFlags = |
| 3307 | maskFlags(Flags, SCEV::FlagNW | NestedAR->getNoWrapFlags()); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3308 | |
| 3309 | NestedOperands[0] = getAddRecExpr(Operands, L, OuterFlags); |
Sanjoy Das | 3b827c7 | 2015-11-29 23:40:53 +0000 | [diff] [blame] | 3310 | AllInvariant = all_of(NestedOperands, [&](const SCEV *Op) { |
| 3311 | return isLoopInvariant(Op, NestedLoop); |
| 3312 | }); |
Sanjoy Das | f07d2a7 | 2015-10-18 00:29:23 +0000 | [diff] [blame] | 3313 | |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3314 | if (AllInvariant) { |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 3315 | // Ok, both add recurrences are valid after the transformation. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3316 | // |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3317 | // The inner recurrence keeps its NW flag but only keeps NUW/NSW if |
| 3318 | // the outer recurrence has the same property. |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 3319 | SCEV::NoWrapFlags InnerFlags = |
| 3320 | maskFlags(NestedAR->getNoWrapFlags(), SCEV::FlagNW | Flags); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3321 | return getAddRecExpr(NestedOperands, NestedLoop, InnerFlags); |
| 3322 | } |
Dan Gohman | cc030b7 | 2009-06-26 22:36:20 +0000 | [diff] [blame] | 3323 | } |
| 3324 | // Reset Operands to its original state. |
| 3325 | Operands[0] = NestedAR; |
Dan Gohman | 223a5d2 | 2008-08-08 18:33:12 +0000 | [diff] [blame] | 3326 | } |
| 3327 | } |
| 3328 | |
Dan Gohman | 8d67d2f | 2010-01-19 22:27:22 +0000 | [diff] [blame] | 3329 | // Okay, it looks like we really DO need an addrec expr. Check to see if we |
| 3330 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3331 | FoldingSetNodeID ID; |
| 3332 | ID.AddInteger(scAddRecExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3333 | for (unsigned i = 0, e = Operands.size(); i != e; ++i) |
| 3334 | ID.AddPointer(Operands[i]); |
| 3335 | ID.AddPointer(L); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3336 | void *IP = nullptr; |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3337 | SCEVAddRecExpr *S = |
| 3338 | static_cast<SCEVAddRecExpr *>(UniqueSCEVs.FindNodeOrInsertPos(ID, IP)); |
| 3339 | if (!S) { |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 3340 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Operands.size()); |
| 3341 | std::uninitialized_copy(Operands.begin(), Operands.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3342 | S = new (SCEVAllocator) SCEVAddRecExpr(ID.Intern(SCEVAllocator), |
| 3343 | O, Operands.size(), L); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3344 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 3345 | addToLoopUseLists(S); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 3346 | } |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 3347 | S->setNoWrapFlags(Flags); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3348 | return S; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3349 | } |
| 3350 | |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3351 | const SCEV * |
Peter Collingbourne | 8dff039 | 2016-11-13 06:59:50 +0000 | [diff] [blame] | 3352 | ScalarEvolution::getGEPExpr(GEPOperator *GEP, |
| 3353 | const SmallVectorImpl<const SCEV *> &IndexExprs) { |
| 3354 | const SCEV *BaseExpr = getSCEV(GEP->getPointerOperand()); |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3355 | // getSCEV(Base)->getType() has the same address space as Base->getType() |
| 3356 | // because SCEV::getType() preserves the address space. |
| 3357 | Type *IntPtrTy = getEffectiveSCEVType(BaseExpr->getType()); |
| 3358 | // FIXME(PR23527): Don't blindly transfer the inbounds flag from the GEP |
| 3359 | // instruction to its SCEV, because the Instruction may be guarded by control |
| 3360 | // flow and the no-overflow bits may not be valid for the expression in any |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3361 | // context. This can be fixed similarly to how these flags are handled for |
| 3362 | // adds. |
Peter Collingbourne | 8dff039 | 2016-11-13 06:59:50 +0000 | [diff] [blame] | 3363 | SCEV::NoWrapFlags Wrap = GEP->isInBounds() ? SCEV::FlagNSW |
| 3364 | : SCEV::FlagAnyWrap; |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3365 | |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 3366 | const SCEV *TotalOffset = getZero(IntPtrTy); |
Peter Collingbourne | 4568158 | 2016-12-02 03:05:41 +0000 | [diff] [blame] | 3367 | // The array size is unimportant. The first thing we do with CurTy is get
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3368 | // its element type.
Peter Collingbourne | 4568158 | 2016-12-02 03:05:41 +0000 | [diff] [blame] | 3369 | Type *CurTy = ArrayType::get(GEP->getSourceElementType(), 0); |
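// For example, 'getelementptr i32, i32* %p, i64 %i' becomes
// (%p + (4 * %i)), with %i truncated or sign-extended to the pointer-sized
// integer type as needed.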
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 3370 | for (const SCEV *IndexExpr : IndexExprs) { |
| 3371 | // Compute the (potentially symbolic) offset in bytes for this index. |
| 3372 | if (StructType *STy = dyn_cast<StructType>(CurTy)) { |
| 3373 | // For a struct, add the member offset. |
| 3374 | ConstantInt *Index = cast<SCEVConstant>(IndexExpr)->getValue(); |
| 3375 | unsigned FieldNo = Index->getZExtValue(); |
| 3376 | const SCEV *FieldOffset = getOffsetOfExpr(IntPtrTy, STy, FieldNo); |
| 3377 | |
| 3378 | // Add the field offset to the running total offset. |
| 3379 | TotalOffset = getAddExpr(TotalOffset, FieldOffset); |
| 3380 | |
| 3381 | // Update CurTy to the type of the field at Index. |
| 3382 | CurTy = STy->getTypeAtIndex(Index); |
| 3383 | } else { |
| 3384 | // Update CurTy to its element type. |
| 3385 | CurTy = cast<SequentialType>(CurTy)->getElementType(); |
| 3386 | // For an array, add the element offset, explicitly scaled. |
| 3387 | const SCEV *ElementSize = getSizeOfExpr(IntPtrTy, CurTy); |
| 3388 | // Getelementptr indices are signed. |
| 3389 | IndexExpr = getTruncateOrSignExtend(IndexExpr, IntPtrTy); |
| 3390 | |
| 3391 | // Multiply the index by the element size to compute the element offset. |
| 3392 | const SCEV *LocalOffset = getMulExpr(IndexExpr, ElementSize, Wrap); |
| 3393 | |
| 3394 | // Add the element offset to the running total offset. |
| 3395 | TotalOffset = getAddExpr(TotalOffset, LocalOffset); |
| 3396 | } |
| 3397 | } |
| 3398 | |
| 3399 | // Add the total offset from all the GEP indices to the base. |
| 3400 | return getAddExpr(BaseExpr, TotalOffset, Wrap); |
| 3401 | } |
| 3402 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3403 | const SCEV *ScalarEvolution::getSMaxExpr(const SCEV *LHS, |
| 3404 | const SCEV *RHS) { |
Benjamin Kramer | 3bc1edf | 2016-07-02 11:41:39 +0000 | [diff] [blame] | 3405 | SmallVector<const SCEV *, 2> Ops = {LHS, RHS}; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3406 | return getSMaxExpr(Ops); |
| 3407 | } |
| 3408 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3409 | const SCEV * |
| 3410 | ScalarEvolution::getSMaxExpr(SmallVectorImpl<const SCEV *> &Ops) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3411 | assert(!Ops.empty() && "Cannot get empty smax!"); |
| 3412 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3413 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3414 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3415 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 3416 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3417 | "SCEVSMaxExpr operand types don't match!"); |
| 3418 | #endif |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3419 | |
| 3420 | // Sort by complexity; this groups all similar expression types together.
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 3421 | GroupByComplexity(Ops, &LI, DT); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3422 | |
| 3423 | // If there are any constants, fold them together. |
| 3424 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3425 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3426 | ++Idx; |
| 3427 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3428 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3429 | // We found two constants, fold them together! |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3430 | ConstantInt *Fold = ConstantInt::get( |
| 3431 | getContext(), APIntOps::smax(LHSC->getAPInt(), RHSC->getAPInt())); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3432 | Ops[0] = getConstant(Fold); |
| 3433 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 3434 | if (Ops.size() == 1) return Ops[0]; |
| 3435 | LHSC = cast<SCEVConstant>(Ops[0]); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3436 | } |
| 3437 | |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3438 | // If we are left with a constant minimum-int, strip it off. |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3439 | if (cast<SCEVConstant>(Ops[0])->getValue()->isMinValue(true)) { |
| 3440 | Ops.erase(Ops.begin()); |
| 3441 | --Idx; |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3442 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isMaxValue(true)) { |
| 3443 | // If we have an smax with a constant maximum-int, it will always be |
| 3444 | // maximum-int. |
| 3445 | return Ops[0]; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3446 | } |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3447 | |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 3448 | if (Ops.size() == 1) return Ops[0]; |
| 3449 | } |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3450 | |
| 3451 | // Find the first SMax |
| 3452 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scSMaxExpr) |
| 3453 | ++Idx; |
| 3454 | |
| 3455 | // Check to see if one of the operands is an SMax. If so, expand its operands |
| 3456 | // onto our operand list, and recurse to simplify. |
| 3457 | if (Idx < Ops.size()) { |
| 3458 | bool DeletedSMax = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3459 | while (const SCEVSMaxExpr *SMax = dyn_cast<SCEVSMaxExpr>(Ops[Idx])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3460 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 3461 | Ops.append(SMax->op_begin(), SMax->op_end()); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3462 | DeletedSMax = true; |
| 3463 | } |
| 3464 | |
| 3465 | if (DeletedSMax) |
| 3466 | return getSMaxExpr(Ops); |
| 3467 | } |
| 3468 | |
| 3469 | // Okay, check to see if the same value occurs in the operand list twice. If |
| 3470 | // so, delete one. Since we sorted the list, these values are required to |
| 3471 | // be adjacent. |
| 3472 | for (unsigned i = 0, e = Ops.size()-1; i != e; ++i) |
Dan Gohman | 7ef0dc2 | 2010-04-13 16:51:03 +0000 | [diff] [blame] | 3473 | // X smax Y smax Y --> X smax Y |
| 3474 | // X smax Y --> X, if X is always greater than or equal to Y
| 3475 | if (Ops[i] == Ops[i+1] || |
| 3476 | isKnownPredicate(ICmpInst::ICMP_SGE, Ops[i], Ops[i+1])) { |
| 3477 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+2); |
| 3478 | --i; --e; |
| 3479 | } else if (isKnownPredicate(ICmpInst::ICMP_SLE, Ops[i], Ops[i+1])) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3480 | Ops.erase(Ops.begin()+i, Ops.begin()+i+1); |
| 3481 | --i; --e; |
| 3482 | } |
| 3483 | |
| 3484 | if (Ops.size() == 1) return Ops[0]; |
| 3485 | |
| 3486 | assert(!Ops.empty() && "Reduced smax down to nothing!"); |
| 3487 | |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3488 | // Okay, it looks like we really DO need an smax expr. Check to see if we |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3489 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3490 | FoldingSetNodeID ID; |
| 3491 | ID.AddInteger(scSMaxExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3492 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 3493 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3494 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3495 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 3496 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 3497 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3498 | SCEV *S = new (SCEVAllocator) SCEVSMaxExpr(ID.Intern(SCEVAllocator), |
| 3499 | O, Ops.size()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3500 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 3501 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3502 | return S; |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 3503 | } |
| 3504 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3505 | const SCEV *ScalarEvolution::getUMaxExpr(const SCEV *LHS, |
| 3506 | const SCEV *RHS) { |
Benjamin Kramer | 3bc1edf | 2016-07-02 11:41:39 +0000 | [diff] [blame] | 3507 | SmallVector<const SCEV *, 2> Ops = {LHS, RHS}; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3508 | return getUMaxExpr(Ops); |
| 3509 | } |
| 3510 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3511 | const SCEV * |
| 3512 | ScalarEvolution::getUMaxExpr(SmallVectorImpl<const SCEV *> &Ops) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3513 | assert(!Ops.empty() && "Cannot get empty umax!"); |
| 3514 | if (Ops.size() == 1) return Ops[0]; |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3515 | #ifndef NDEBUG |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3516 | Type *ETy = getEffectiveSCEVType(Ops[0]->getType()); |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3517 | for (unsigned i = 1, e = Ops.size(); i != e; ++i) |
Dan Gohman | b6c773e | 2010-08-16 16:13:54 +0000 | [diff] [blame] | 3518 | assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy && |
Dan Gohman | d33f36e | 2009-05-18 15:44:58 +0000 | [diff] [blame] | 3519 | "SCEVUMaxExpr operand types don't match!"); |
| 3520 | #endif |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3521 | |
| 3522 | // Sort by complexity; this groups all similar expression types together.
Max Kazantsev | b09b5db | 2017-05-16 07:27:06 +0000 | [diff] [blame] | 3523 | GroupByComplexity(Ops, &LI, DT); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3524 | |
| 3525 | // If there are any constants, fold them together. |
| 3526 | unsigned Idx = 0; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3527 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3528 | ++Idx; |
| 3529 | assert(Idx < Ops.size()); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3530 | while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3531 | // We found two constants, fold them together! |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 3532 | ConstantInt *Fold = ConstantInt::get( |
| 3533 | getContext(), APIntOps::umax(LHSC->getAPInt(), RHSC->getAPInt())); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3534 | Ops[0] = getConstant(Fold); |
| 3535 | Ops.erase(Ops.begin()+1); // Erase the folded element |
| 3536 | if (Ops.size() == 1) return Ops[0]; |
| 3537 | LHSC = cast<SCEVConstant>(Ops[0]); |
| 3538 | } |
| 3539 | |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3540 | // If we are left with a constant minimum-int, strip it off. |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3541 | if (cast<SCEVConstant>(Ops[0])->getValue()->isMinValue(false)) { |
| 3542 | Ops.erase(Ops.begin()); |
| 3543 | --Idx; |
Dan Gohman | f57bdb7 | 2009-06-24 14:46:22 +0000 | [diff] [blame] | 3544 | } else if (cast<SCEVConstant>(Ops[0])->getValue()->isMaxValue(false)) { |
| 3545 | // If we have an umax with a constant maximum-int, it will always be |
| 3546 | // maximum-int. |
| 3547 | return Ops[0]; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3548 | } |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3549 | |
Dan Gohman | fe4b291 | 2010-04-13 16:49:23 +0000 | [diff] [blame] | 3550 | if (Ops.size() == 1) return Ops[0]; |
| 3551 | } |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3552 | |
| 3553 | // Find the first UMax |
| 3554 | while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < scUMaxExpr) |
| 3555 | ++Idx; |
| 3556 | |
| 3557 | // Check to see if one of the operands is a UMax. If so, expand its operands |
| 3558 | // onto our operand list, and recurse to simplify. |
| 3559 | if (Idx < Ops.size()) { |
| 3560 | bool DeletedUMax = false; |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3561 | while (const SCEVUMaxExpr *UMax = dyn_cast<SCEVUMaxExpr>(Ops[Idx])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3562 | Ops.erase(Ops.begin()+Idx); |
Dan Gohman | dd41bba | 2010-06-21 19:47:52 +0000 | [diff] [blame] | 3563 | Ops.append(UMax->op_begin(), UMax->op_end()); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3564 | DeletedUMax = true; |
| 3565 | } |
| 3566 | |
| 3567 | if (DeletedUMax) |
| 3568 | return getUMaxExpr(Ops); |
| 3569 | } |
| 3570 | |
| 3571 | // Okay, check to see if the same value occurs in the operand list twice. If |
| 3572 | // so, delete one. Since we sorted the list, these values are required to |
| 3573 | // be adjacent. |
| 3574 | for (unsigned i = 0, e = Ops.size()-1; i != e; ++i) |
Dan Gohman | 7ef0dc2 | 2010-04-13 16:51:03 +0000 | [diff] [blame] | 3575 | // X umax Y umax Y --> X umax Y |
| 3576 | // X umax Y --> X, if X is always greater than or equal to Y
| 3577 | if (Ops[i] == Ops[i+1] || |
| 3578 | isKnownPredicate(ICmpInst::ICMP_UGE, Ops[i], Ops[i+1])) { |
| 3579 | Ops.erase(Ops.begin()+i+1, Ops.begin()+i+2); |
| 3580 | --i; --e; |
| 3581 | } else if (isKnownPredicate(ICmpInst::ICMP_ULE, Ops[i], Ops[i+1])) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3582 | Ops.erase(Ops.begin()+i, Ops.begin()+i+1); |
| 3583 | --i; --e; |
| 3584 | } |
| 3585 | |
| 3586 | if (Ops.size() == 1) return Ops[0]; |
| 3587 | |
| 3588 | assert(!Ops.empty() && "Reduced umax down to nothing!"); |
| 3589 | |
| 3590 | // Okay, it looks like we really DO need a umax expr. Check to see if we |
| 3591 | // already have one, otherwise create a new one. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3592 | FoldingSetNodeID ID; |
| 3593 | ID.AddInteger(scUMaxExpr); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3594 | for (unsigned i = 0, e = Ops.size(); i != e; ++i) |
| 3595 | ID.AddPointer(Ops[i]); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3596 | void *IP = nullptr; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3597 | if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S; |
Dan Gohman | 0052449 | 2010-03-18 01:17:13 +0000 | [diff] [blame] | 3598 | const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size()); |
| 3599 | std::uninitialized_copy(Ops.begin(), Ops.end(), O); |
Dan Gohman | 01c65a2 | 2010-03-18 18:49:47 +0000 | [diff] [blame] | 3600 | SCEV *S = new (SCEVAllocator) SCEVUMaxExpr(ID.Intern(SCEVAllocator), |
| 3601 | O, Ops.size()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3602 | UniqueSCEVs.InsertNode(S, IP); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 3603 | addToLoopUseLists(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3604 | return S; |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 3605 | } |
| 3606 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3607 | const SCEV *ScalarEvolution::getSMinExpr(const SCEV *LHS, |
| 3608 | const SCEV *RHS) { |
Dan Gohman | 692b468 | 2009-06-22 03:18:45 +0000 | [diff] [blame] | 3609 | // ~smax(~x, ~y) == smin(x, y). |
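// (Bitwise-not is -1 - x, which is an involution that reverses the signed
// order, so the identity follows.)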
| 3610 | return getNotSCEV(getSMaxExpr(getNotSCEV(LHS), getNotSCEV(RHS))); |
| 3611 | } |
| 3612 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3613 | const SCEV *ScalarEvolution::getUMinExpr(const SCEV *LHS, |
| 3614 | const SCEV *RHS) { |
Dan Gohman | 692b468 | 2009-06-22 03:18:45 +0000 | [diff] [blame] | 3615 | // ~umax(~x, ~y) == umin(x, y) |
| 3616 | return getNotSCEV(getUMaxExpr(getNotSCEV(LHS), getNotSCEV(RHS))); |
| 3617 | } |
| 3618 | |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3619 | const SCEV *ScalarEvolution::getSizeOfExpr(Type *IntTy, Type *AllocTy) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3620 | // We can bypass creating a target-independent |
Dan Gohman | 11862a6 | 2010-04-12 23:03:26 +0000 | [diff] [blame] | 3621 | // constant expression and then folding it back into a ConstantInt. |
| 3622 | // This is just a compile-time optimization. |
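// For example, for i32 this typically yields the constant 4, its alloc size
// in the target DataLayout.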
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 3623 | return getConstant(IntTy, getDataLayout().getTypeAllocSize(AllocTy)); |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 3624 | } |
| 3625 | |
Matt Arsenault | a90a18e | 2013-09-10 19:55:24 +0000 | [diff] [blame] | 3626 | const SCEV *ScalarEvolution::getOffsetOfExpr(Type *IntTy, |
| 3627 | StructType *STy, |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 3628 | unsigned FieldNo) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3629 | // We can bypass creating a target-independent |
Dan Gohman | 11862a6 | 2010-04-12 23:03:26 +0000 | [diff] [blame] | 3630 | // constant expression and then folding it back into a ConstantInt. |
| 3631 | // This is just a compile-time optimization. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3632 | return getConstant( |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 3633 | IntTy, getDataLayout().getStructLayout(STy)->getElementOffset(FieldNo)); |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3634 | } |
| 3635 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3636 | const SCEV *ScalarEvolution::getUnknown(Value *V) { |
Dan Gohman | f436bac | 2009-06-24 00:54:57 +0000 | [diff] [blame] | 3637 | // Don't attempt to do anything other than create a SCEVUnknown object |
| 3638 | // here. createSCEV only calls getUnknown after checking for all other |
| 3639 | // interesting possibilities, and any other code that calls getUnknown |
| 3640 | // is doing so in order to hide a value from SCEV canonicalization. |
| 3641 | |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3642 | FoldingSetNodeID ID; |
| 3643 | ID.AddInteger(scUnknown); |
| 3644 | ID.AddPointer(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3645 | void *IP = nullptr; |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 3646 | if (SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) { |
| 3647 | assert(cast<SCEVUnknown>(S)->getValue() == V && |
| 3648 | "Stale SCEVUnknown in uniquing map!"); |
| 3649 | return S; |
| 3650 | } |
| 3651 | SCEV *S = new (SCEVAllocator) SCEVUnknown(ID.Intern(SCEVAllocator), V, this, |
| 3652 | FirstUnknown); |
| 3653 | FirstUnknown = cast<SCEVUnknown>(S); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 3654 | UniqueSCEVs.InsertNode(S, IP); |
| 3655 | return S; |
Chris Lattner | b4f681b | 2004-04-15 15:07:24 +0000 | [diff] [blame] | 3656 | } |
| 3657 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3658 | //===----------------------------------------------------------------------===// |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3659 | // Basic SCEV Analysis and PHI Idiom Recognition Code |
| 3660 | // |
| 3661 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3662 | /// Test if values of the given type are analyzable within the SCEV |
| 3663 | /// framework. This primarily includes integer types, and it can optionally |
| 3664 | /// include pointer types if the ScalarEvolution class has access to |
| 3665 | /// target-specific information. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3666 | bool ScalarEvolution::isSCEVable(Type *Ty) const { |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3667 | // Integers and pointers are always SCEVable. |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3668 | return Ty->isIntegerTy() || Ty->isPointerTy(); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3669 | } |
| 3670 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3671 | /// Return the size in bits of the specified type, for which isSCEVable must |
| 3672 | /// return true. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3673 | uint64_t ScalarEvolution::getTypeSizeInBits(Type *Ty) const { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3674 | assert(isSCEVable(Ty) && "Type is not SCEVable!"); |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 3675 | return getDataLayout().getTypeSizeInBits(Ty); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3676 | } |
| 3677 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3678 | /// Return a type with the same bitwidth as the given type and which represents |
| 3679 | /// how SCEV will treat the given type, for which isSCEVable must return |
| 3680 | /// true. For pointer types, this is the pointer-sized integer type. |
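/// For example, on a target with 64-bit pointers, an i8* is treated as i64.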
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3681 | Type *ScalarEvolution::getEffectiveSCEVType(Type *Ty) const { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3682 | assert(isSCEVable(Ty) && "Type is not SCEVable!"); |
| 3683 | |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 3684 | if (Ty->isIntegerTy()) |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3685 | return Ty; |
| 3686 | |
Dan Gohman | bf2a9ae | 2009-08-18 16:46:41 +0000 | [diff] [blame] | 3687 | // The only other supported type is pointer.
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3688 | assert(Ty->isPointerTy() && "Unexpected non-pointer non-integer type!"); |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 3689 | return getDataLayout().getIntPtrType(Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3690 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3691 | |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 3692 | Type *ScalarEvolution::getWiderType(Type *T1, Type *T2) const { |
| 3693 | return getTypeSizeInBits(T1) >= getTypeSizeInBits(T2) ? T1 : T2; |
| 3694 | } |
| 3695 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3696 | const SCEV *ScalarEvolution::getCouldNotCompute() { |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 3697 | return CouldNotCompute.get(); |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 3698 | } |
| 3699 | |
Sanjoy Das | 7d75267 | 2015-12-08 04:32:54 +0000 | [diff] [blame] | 3700 | bool ScalarEvolution::checkValidity(const SCEV *S) const { |
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 3701 | bool ContainsNulls = SCEVExprContains(S, [](const SCEV *S) { |
| 3702 | auto *SU = dyn_cast<SCEVUnknown>(S); |
| 3703 | return SU && SU->getValue() == nullptr; |
| 3704 | }); |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3705 | |
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 3706 | return !ContainsNulls; |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3707 | } |
| 3708 | |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3709 | bool ScalarEvolution::containsAddRecurrence(const SCEV *S) { |
Sanjoy Das | a260214 | 2016-09-27 18:01:46 +0000 | [diff] [blame] | 3710 | HasRecMapType::iterator I = HasRecMap.find(S); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3711 | if (I != HasRecMap.end()) |
| 3712 | return I->second; |
| 3713 | |
Sanjoy Das | 0ae390a | 2016-11-10 06:33:54 +0000 | [diff] [blame] | 3714 | bool FoundAddRec = SCEVExprContains(S, isa<SCEVAddRecExpr, const SCEV *>); |
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 3715 | HasRecMap.insert({S, FoundAddRec}); |
| 3716 | return FoundAddRec; |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3717 | } |
| 3718 | |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3719 | /// Try to split a SCEVAddExpr into a pair of {SCEV, ConstantInt}. |
| 3720 | /// If \p S is a SCEVAddExpr and is composed of a sub SCEV S' and an |
| 3721 | /// offset I, then return {S', I}, else return {\p S, nullptr}. |
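/// For example, (5 + %x) splits into {%x, 5}, while (%x + %y) is returned
/// unsplit as {(%x + %y), nullptr}.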
| 3722 | static std::pair<const SCEV *, ConstantInt *> splitAddExpr(const SCEV *S) { |
| 3723 | const auto *Add = dyn_cast<SCEVAddExpr>(S); |
| 3724 | if (!Add) |
| 3725 | return {S, nullptr}; |
| 3726 | |
| 3727 | if (Add->getNumOperands() != 2) |
| 3728 | return {S, nullptr}; |
| 3729 | |
| 3730 | auto *ConstOp = dyn_cast<SCEVConstant>(Add->getOperand(0)); |
| 3731 | if (!ConstOp) |
| 3732 | return {S, nullptr}; |
| 3733 | |
| 3734 | return {Add->getOperand(1), ConstOp->getValue()}; |
| 3735 | } |
| 3736 | |
| 3737 | /// Return the ValueOffsetPair set for \p S. \p S can be represented |
| 3738 | /// by the value and offset from any ValueOffsetPair in the set. |
| 3739 | SetVector<ScalarEvolution::ValueOffsetPair> * |
| 3740 | ScalarEvolution::getSCEVValues(const SCEV *S) { |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3741 | ExprValueMapType::iterator SI = ExprValueMap.find_as(S); |
| 3742 | if (SI == ExprValueMap.end()) |
| 3743 | return nullptr; |
| 3744 | #ifndef NDEBUG |
| 3745 | if (VerifySCEVMap) { |
| 3746 | // Check there is no dangling Value in the set returned. |
| 3747 | for (const auto &VE : SI->second) |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3748 | assert(ValueExprMap.count(VE.first)); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3749 | } |
| 3750 | #endif |
| 3751 | return &SI->second; |
| 3752 | } |
| 3753 | |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3754 | /// Erase Value from ValueExprMap and ExprValueMap. ValueExprMap.erase(V) |
| 3755 | /// cannot be used separately. eraseValueFromMap should be used to remove |
| 3756 | /// V from ValueExprMap and ExprValueMap at the same time. |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3757 | void ScalarEvolution::eraseValueFromMap(Value *V) { |
| 3758 | ValueExprMapType::iterator I = ValueExprMap.find_as(V); |
| 3759 | if (I != ValueExprMap.end()) { |
| 3760 | const SCEV *S = I->second; |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3761 | // Remove {V, 0} from the set of ExprValueMap[S] |
| 3762 | if (SetVector<ValueOffsetPair> *SV = getSCEVValues(S)) |
| 3763 | SV->remove({V, nullptr}); |
| 3764 | |
| 3765 | // Remove {V, Offset} from the set of ExprValueMap[Stripped] |
| 3766 | const SCEV *Stripped; |
| 3767 | ConstantInt *Offset; |
| 3768 | std::tie(Stripped, Offset) = splitAddExpr(S); |
| 3769 | if (Offset != nullptr) { |
| 3770 | if (SetVector<ValueOffsetPair> *SV = getSCEVValues(Stripped)) |
| 3771 | SV->remove({V, Offset}); |
| 3772 | } |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3773 | ValueExprMap.erase(V); |
| 3774 | } |
| 3775 | } |
| 3776 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3777 | /// Return an existing SCEV if it exists, otherwise analyze the expression and |
| 3778 | /// create a new one. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3779 | const SCEV *ScalarEvolution::getSCEV(Value *V) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3780 | assert(isSCEVable(V->getType()) && "Value is not SCEVable!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3781 | |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3782 | const SCEV *S = getExistingSCEV(V); |
| 3783 | if (S == nullptr) { |
| 3784 | S = createSCEV(V); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3785 | // During PHI resolution, it is possible to create two SCEVs for the same |
| 3786 | // V, so we need to double-check whether V->S is already inserted into
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3787 | // ValueExprMap before inserting S->{V, 0} into ExprValueMap.
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3788 | std::pair<ValueExprMapType::iterator, bool> Pair = |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 3789 | ValueExprMap.insert({SCEVCallbackVH(V, this), S}); |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3790 | if (Pair.second) { |
| 3791 | ExprValueMap[S].insert({V, nullptr}); |
| 3792 | |
| 3793 | // If S == Stripped + Offset, add Stripped -> {V, Offset} into |
| 3794 | // ExprValueMap. |
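// For example, if %v maps to the SCEV (4 + %x), also record %x -> {%v, 4} so
// that %v can potentially be reused (adjusted by the offset) when a value is
// needed for %x.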
| 3795 | const SCEV *Stripped = S; |
| 3796 | ConstantInt *Offset = nullptr; |
| 3797 | std::tie(Stripped, Offset) = splitAddExpr(S); |
| 3798 | // If stripped is SCEVUnknown, don't bother to save |
| 3799 | // Stripped -> {V, offset}. It doesn't simplify and sometimes even |
| 3800 | // increases the complexity of the expansion code.
| 3801 | // If V is GetElementPtrInst, don't save Stripped -> {V, offset} |
| 3802 | // because it may generate add/sub instead of GEP in SCEV expansion. |
| 3803 | if (Offset != nullptr && !isa<SCEVUnknown>(Stripped) && |
| 3804 | !isa<GetElementPtrInst>(V)) |
| 3805 | ExprValueMap[Stripped].insert({V, Offset}); |
| 3806 | } |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3807 | } |
| 3808 | return S; |
| 3809 | } |
| 3810 | |
| 3811 | const SCEV *ScalarEvolution::getExistingSCEV(Value *V) { |
| 3812 | assert(isSCEVable(V->getType()) && "Value is not SCEVable!"); |
| 3813 | |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3814 | ValueExprMapType::iterator I = ValueExprMap.find_as(V); |
| 3815 | if (I != ValueExprMap.end()) { |
| 3816 | const SCEV *S = I->second; |
Shuxin Yang | 23773b3 | 2013-07-12 07:25:38 +0000 | [diff] [blame] | 3817 | if (checkValidity(S)) |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3818 | return S; |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 3819 | eraseValueFromMap(V); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 3820 | forgetMemoizedResults(S); |
Shuxin Yang | efc4c01 | 2013-07-08 17:33:13 +0000 | [diff] [blame] | 3821 | } |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 3822 | return nullptr; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 3823 | } |
| 3824 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3825 | /// Return a SCEV corresponding to -V = -1*V |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3826 | const SCEV *ScalarEvolution::getNegativeSCEV(const SCEV *V, |
| 3827 | SCEV::NoWrapFlags Flags) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3828 | if (const SCEVConstant *VC = dyn_cast<SCEVConstant>(V)) |
Owen Anderson | 53a5221 | 2009-07-13 04:09:18 +0000 | [diff] [blame] | 3829 | return getConstant( |
Owen Anderson | 487375e | 2009-07-29 18:55:55 +0000 | [diff] [blame] | 3830 | cast<ConstantInt>(ConstantExpr::getNeg(VC->getValue()))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3831 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3832 | Type *Ty = V->getType(); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3833 | Ty = getEffectiveSCEVType(Ty); |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3834 | return getMulExpr( |
| 3835 | V, getConstant(cast<ConstantInt>(Constant::getAllOnesValue(Ty))), Flags); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3836 | } |
| 3837 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 3838 | /// Return a SCEV corresponding to ~V = -1-V |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3839 | const SCEV *ScalarEvolution::getNotSCEV(const SCEV *V) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 3840 | if (const SCEVConstant *VC = dyn_cast<SCEVConstant>(V)) |
Owen Anderson | 542619e | 2009-07-13 20:58:05 +0000 | [diff] [blame] | 3841 | return getConstant( |
Owen Anderson | 487375e | 2009-07-29 18:55:55 +0000 | [diff] [blame] | 3842 | cast<ConstantInt>(ConstantExpr::getNot(VC->getValue()))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3843 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3844 | Type *Ty = V->getType(); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3845 | Ty = getEffectiveSCEVType(Ty); |
Owen Anderson | 542619e | 2009-07-13 20:58:05 +0000 | [diff] [blame] | 3846 | const SCEV *AllOnes = |
Owen Anderson | 5a1acd9 | 2009-07-31 20:28:14 +0000 | [diff] [blame] | 3847 | getConstant(cast<ConstantInt>(Constant::getAllOnesValue(Ty))); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3848 | return getMinusSCEV(AllOnes, V); |
| 3849 | } |
| 3850 | |
Chris Lattner | fc87752 | 2011-01-09 22:26:35 +0000 | [diff] [blame] | 3851 | const SCEV *ScalarEvolution::getMinusSCEV(const SCEV *LHS, const SCEV *RHS, |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 3852 | SCEV::NoWrapFlags Flags, |
| 3853 | unsigned Depth) { |
Dan Gohman | 46f00a2 | 2010-07-20 16:53:00 +0000 | [diff] [blame] | 3854 | // Fast path: X - X --> 0. |
| 3855 | if (LHS == RHS) |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 3856 | return getZero(LHS->getType()); |
Dan Gohman | 46f00a2 | 2010-07-20 16:53:00 +0000 | [diff] [blame] | 3857 | |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3858 | // We represent LHS - RHS as LHS + (-1)*RHS. This transformation |
| 3859 | // makes it so that we cannot make much use of NUW. |
| 3860 | auto AddFlags = SCEV::FlagAnyWrap; |
| 3861 | const bool RHSIsNotMinSigned = |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 3862 | !getSignedRangeMin(RHS).isMinSignedValue(); |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 3863 | if (maskFlags(Flags, SCEV::FlagNSW) == SCEV::FlagNSW) { |
| 3864 | // Let M be the minimum representable signed value. Then (-1)*RHS |
| 3865 | // signed-wraps if and only if RHS is M. That can happen even for |
| 3866 | // a NSW subtraction because e.g. (-1)*M signed-wraps even though |
| 3867 | // -1 - M does not. So to transfer NSW from LHS - RHS to LHS + |
| 3868 | // (-1)*RHS, we need to prove that RHS != M. |
| 3869 | // |
| 3870 | // If LHS is non-negative and we know that LHS - RHS does not |
| 3871 | // signed-wrap, then RHS cannot be M. So we can rule out signed-wrap |
| 3872 | // either by proving that RHS > M or that LHS >= 0. |
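// For i8, for instance, M is -128: (-1) * (-128) wraps back to -128, while
// -1 - (-128) is 127 and does not wrap.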
| 3873 | if (RHSIsNotMinSigned || isKnownNonNegative(LHS)) { |
| 3874 | AddFlags = SCEV::FlagNSW; |
| 3875 | } |
| 3876 | } |
| 3877 | |
| 3878 | // FIXME: Find a correct way to transfer NSW to (-1)*M when LHS - |
| 3879 | // RHS is NSW and LHS >= 0. |
| 3880 | // |
| 3881 | // The difficulty here is that the NSW flag may have been proven |
| 3882 | // relative to a loop that is to be found in a recurrence in LHS and |
| 3883 | // not in RHS. Applying NSW to (-1)*M may then let the NSW have a |
| 3884 | // larger scope than intended. |
| 3885 | auto NegFlags = RHSIsNotMinSigned ? SCEV::FlagNSW : SCEV::FlagAnyWrap; |
| 3886 | |
Max Kazantsev | dc80366 | 2017-06-15 11:48:21 +0000 | [diff] [blame] | 3887 | return getAddExpr(LHS, getNegativeSCEV(RHS, NegFlags), AddFlags, Depth); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3888 | } |
| 3889 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3890 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3891 | ScalarEvolution::getTruncateOrZeroExtend(const SCEV *V, Type *Ty) { |
| 3892 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3893 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3894 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3895 | "Cannot truncate or zero extend with non-integer arguments!"); |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3896 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3897 | return V; // No conversion |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3898 | if (getTypeSizeInBits(SrcTy) > getTypeSizeInBits(Ty)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3899 | return getTruncateExpr(V, Ty); |
| 3900 | return getZeroExtendExpr(V, Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3901 | } |
| 3902 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3903 | const SCEV * |
| 3904 | ScalarEvolution::getTruncateOrSignExtend(const SCEV *V, |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3905 | Type *Ty) { |
| 3906 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3907 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3908 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3909 | "Cannot truncate or sign extend with non-integer arguments!");
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3910 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3911 | return V; // No conversion |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 3912 | if (getTypeSizeInBits(SrcTy) > getTypeSizeInBits(Ty)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 3913 | return getTruncateExpr(V, Ty); |
| 3914 | return getSignExtendExpr(V, Ty); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 3915 | } |
| 3916 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3917 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3918 | ScalarEvolution::getNoopOrZeroExtend(const SCEV *V, Type *Ty) { |
| 3919 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3920 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3921 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3922 | "Cannot noop or zero extend with non-integer arguments!"); |
| 3923 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3924 | "getNoopOrZeroExtend cannot truncate!"); |
| 3925 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3926 | return V; // No conversion |
| 3927 | return getZeroExtendExpr(V, Ty); |
| 3928 | } |
| 3929 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3930 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3931 | ScalarEvolution::getNoopOrSignExtend(const SCEV *V, Type *Ty) { |
| 3932 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3933 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3934 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3935 | "Cannot noop or sign extend with non-integer arguments!"); |
| 3936 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3937 | "getNoopOrSignExtend cannot truncate!"); |
| 3938 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3939 | return V; // No conversion |
| 3940 | return getSignExtendExpr(V, Ty); |
| 3941 | } |
| 3942 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3943 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3944 | ScalarEvolution::getNoopOrAnyExtend(const SCEV *V, Type *Ty) { |
| 3945 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3946 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3947 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | 8db2edc | 2009-06-13 15:56:47 +0000 | [diff] [blame] | 3948 | "Cannot noop or any extend with non-integer arguments!"); |
| 3949 | assert(getTypeSizeInBits(SrcTy) <= getTypeSizeInBits(Ty) && |
| 3950 | "getNoopOrAnyExtend cannot truncate!"); |
| 3951 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3952 | return V; // No conversion |
| 3953 | return getAnyExtendExpr(V, Ty); |
| 3954 | } |
| 3955 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3956 | const SCEV * |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3957 | ScalarEvolution::getTruncateOrNoop(const SCEV *V, Type *Ty) { |
| 3958 | Type *SrcTy = V->getType(); |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 3959 | assert((SrcTy->isIntegerTy() || SrcTy->isPointerTy()) && |
| 3960 | (Ty->isIntegerTy() || Ty->isPointerTy()) && |
Dan Gohman | e712a2f | 2009-05-13 03:46:30 +0000 | [diff] [blame] | 3961 | "Cannot truncate or noop with non-integer arguments!"); |
| 3962 | assert(getTypeSizeInBits(SrcTy) >= getTypeSizeInBits(Ty) && |
| 3963 | "getTruncateOrNoop cannot extend!"); |
| 3964 | if (getTypeSizeInBits(SrcTy) == getTypeSizeInBits(Ty)) |
| 3965 | return V; // No conversion |
| 3966 | return getTruncateExpr(V, Ty); |
| 3967 | } |
| 3968 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3969 | const SCEV *ScalarEvolution::getUMaxFromMismatchedTypes(const SCEV *LHS, |
| 3970 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3971 | const SCEV *PromotedLHS = LHS; |
| 3972 | const SCEV *PromotedRHS = RHS; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 3973 | |
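// Zero-extend the narrower operand to the wider type, e.g.
// umax(i32 %x, i8 %y) --> umax(%x, (zext i8 %y to i32)).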
| 3974 | if (getTypeSizeInBits(LHS->getType()) > getTypeSizeInBits(RHS->getType())) |
| 3975 | PromotedRHS = getZeroExtendExpr(RHS, LHS->getType()); |
| 3976 | else |
| 3977 | PromotedLHS = getNoopOrZeroExtend(LHS, RHS->getType()); |
| 3978 | |
| 3979 | return getUMaxExpr(PromotedLHS, PromotedRHS); |
| 3980 | } |
| 3981 | |
Dan Gohman | abd1709 | 2009-06-24 14:49:00 +0000 | [diff] [blame] | 3982 | const SCEV *ScalarEvolution::getUMinFromMismatchedTypes(const SCEV *LHS, |
| 3983 | const SCEV *RHS) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 3984 | const SCEV *PromotedLHS = LHS; |
| 3985 | const SCEV *PromotedRHS = RHS; |
Dan Gohman | 2bc2230 | 2009-06-22 15:03:27 +0000 | [diff] [blame] | 3986 | |
| 3987 | if (getTypeSizeInBits(LHS->getType()) > getTypeSizeInBits(RHS->getType())) |
| 3988 | PromotedRHS = getZeroExtendExpr(RHS, LHS->getType()); |
| 3989 | else |
| 3990 | PromotedLHS = getNoopOrZeroExtend(LHS, RHS->getType()); |
| 3991 | |
| 3992 | return getUMinExpr(PromotedLHS, PromotedRHS); |
| 3993 | } |
| 3994 | |
Andrew Trick | 87716c9 | 2011-03-17 23:51:11 +0000 | [diff] [blame] | 3995 | const SCEV *ScalarEvolution::getPointerBase(const SCEV *V) { |
| 3996 | // A pointer operand may evaluate to a nonpointer expression, such as null. |
| 3997 | if (!V->getType()->isPointerTy()) |
| 3998 | return V; |
| 3999 | |
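// Strip casts and descend into the single pointer operand of n-ary
// expressions; e.g. the pointer base of ((4 * %i) + %p) is %p.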
| 4000 | if (const SCEVCastExpr *Cast = dyn_cast<SCEVCastExpr>(V)) { |
| 4001 | return getPointerBase(Cast->getOperand()); |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 4002 | } else if (const SCEVNAryExpr *NAry = dyn_cast<SCEVNAryExpr>(V)) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4003 | const SCEV *PtrOp = nullptr; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 4004 | for (const SCEV *NAryOp : NAry->operands()) { |
| 4005 | if (NAryOp->getType()->isPointerTy()) { |
Andrew Trick | 87716c9 | 2011-03-17 23:51:11 +0000 | [diff] [blame] | 4006 | // Cannot find the base of an expression with multiple pointer operands. |
| 4007 | if (PtrOp) |
| 4008 | return V; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 4009 | PtrOp = NAryOp; |
Andrew Trick | 87716c9 | 2011-03-17 23:51:11 +0000 | [diff] [blame] | 4010 | } |
| 4011 | } |
| 4012 | if (!PtrOp) |
| 4013 | return V; |
| 4014 | return getPointerBase(PtrOp); |
| 4015 | } |
| 4016 | return V; |
| 4017 | } |
| 4018 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 4019 | /// Push users of the given Instruction onto the given Worklist. |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4020 | static void |
| 4021 | PushDefUseChildren(Instruction *I, |
| 4022 | SmallVectorImpl<Instruction *> &Worklist) { |
| 4023 | // Push the def-use children onto the Worklist stack. |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 4024 | for (User *U : I->users()) |
| 4025 | Worklist.push_back(cast<Instruction>(U)); |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4026 | } |
| 4027 | |
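/// Walk the def-use chain of \p PN and remove from ValueExprMap (and the
/// memoized results) every cached SCEV that still refers to \p SymName, so
/// that those users are re-analyzed once the PHI itself has been resolved.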
Sanjoy Das | f1e9cae0 | 2016-03-01 19:28:01 +0000 | [diff] [blame] | 4028 | void ScalarEvolution::forgetSymbolicName(Instruction *PN, const SCEV *SymName) { |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4029 | SmallVector<Instruction *, 16> Worklist; |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 4030 | PushDefUseChildren(PN, Worklist); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4031 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4032 | SmallPtrSet<Instruction *, 8> Visited; |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 4033 | Visited.insert(PN); |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4034 | while (!Worklist.empty()) { |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 4035 | Instruction *I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 4036 | if (!Visited.insert(I).second) |
| 4037 | continue; |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 4038 | |
Sanjoy Das | 6391459 | 2015-10-18 00:29:20 +0000 | [diff] [blame] | 4039 | auto It = ValueExprMap.find_as(static_cast<Value *>(I)); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 4040 | if (It != ValueExprMap.end()) { |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 4041 | const SCEV *Old = It->second; |
| 4042 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4043 | // Short-circuit the def-use traversal if the symbolic name |
| 4044 | // ceases to appear in expressions. |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 4045 | if (Old != SymName && !hasOperand(Old, SymName)) |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4046 | continue; |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 4047 | |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4048 | // SCEVUnknown for a PHI either means that it has an unrecognized |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 4049 | // structure, it's a PHI that's in the progress of being computed |
| 4050 | // by createNodeForPHI, or it's a single-value PHI. In the first case, |
| 4051 | // additional loop trip count information isn't going to change anything. |
| 4052 | // In the second case, createNodeForPHI will perform the necessary |
| 4053 | // updates on its own when it gets to that point. In the third, we do |
| 4054 | // want to forget the SCEVUnknown. |
| 4055 | if (!isa<PHINode>(I) || |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 4056 | !isa<SCEVUnknown>(Old) || |
| 4057 | (I != PN && Old == SymName)) { |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 4058 | eraseValueFromMap(It->first); |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 4059 | forgetMemoizedResults(Old); |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 4060 | } |
Dan Gohman | 0b89dff | 2009-07-25 01:13:03 +0000 | [diff] [blame] | 4061 | } |
| 4062 | |
| 4063 | PushDefUseChildren(I, Worklist); |
| 4064 | } |
Chris Lattner | 7b0fbe7 | 2005-02-13 04:37:18 +0000 | [diff] [blame] | 4065 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4066 | |
Benjamin Kramer | 83709b1 | 2015-11-16 09:01:28 +0000 | [diff] [blame] | 4067 | namespace { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4068 | |
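/// Rewrite a SCEV expression for loop L by replacing each add recurrence of
/// L with its start value, e.g. {%start,+,%step}<L> becomes %start.
/// Recurrences of other loops, or SCEVUnknowns that are not invariant in L,
/// make the rewrite invalid, in which case rewrite() returns CouldNotCompute.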
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4069 | class SCEVInitRewriter : public SCEVRewriteVisitor<SCEVInitRewriter> { |
| 4070 | public: |
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 4071 | static const SCEV *rewrite(const SCEV *S, const Loop *L, |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4072 | ScalarEvolution &SE) { |
| 4073 | SCEVInitRewriter Rewriter(L, SE); |
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 4074 | const SCEV *Result = Rewriter.visit(S); |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4075 | return Rewriter.isValid() ? Result : SE.getCouldNotCompute(); |
| 4076 | } |
| 4077 | |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4078 | const SCEV *visitUnknown(const SCEVUnknown *Expr) { |
Max Kazantsev | 627ad0f | 2017-05-18 08:26:41 +0000 | [diff] [blame] | 4079 | if (!SE.isLoopInvariant(Expr, L)) |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4080 | Valid = false; |
| 4081 | return Expr; |
| 4082 | } |
| 4083 | |
| 4084 | const SCEV *visitAddRecExpr(const SCEVAddRecExpr *Expr) { |
| 4085 | // Only allow AddRecExprs for this loop. |
| 4086 | if (Expr->getLoop() == L) |
| 4087 | return Expr->getStart(); |
| 4088 | Valid = false; |
| 4089 | return Expr; |
| 4090 | } |
| 4091 | |
| 4092 | bool isValid() { return Valid; } |
| 4093 | |
| 4094 | private: |
Jatin Bhateja | 7410eea | 2017-11-26 15:08:41 +0000 | [diff] [blame] | 4095 | explicit SCEVInitRewriter(const Loop *L, ScalarEvolution &SE) |
| 4096 | : SCEVRewriteVisitor(SE), L(L) {} |
| 4097 | |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4098 | const Loop *L; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4099 | bool Valid = true; |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4100 | }; |
| 4101 | |
Jatin Bhateja | c61ade1 | 2017-11-13 16:43:24 +0000 | [diff] [blame] | 4102 | /// This class evaluates the compare condition by matching it against the |
| 4103 | /// condition of the loop latch. If there is a match, we assume a true value |
| 4104 | /// for the condition while building SCEV nodes. |
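/// For example, if the latch of L ends in "br i1 %c, label %header, ...",
/// then while rewriting an expression for L a "select i1 %c, %a, %b" is
/// folded to the SCEV of %a, because %c is known to be true whenever the
/// backedge is taken.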
| 4105 | class SCEVBackedgeConditionFolder |
| 4106 | : public SCEVRewriteVisitor<SCEVBackedgeConditionFolder> { |
| 4107 | public: |
| 4108 | static const SCEV *rewrite(const SCEV *S, const Loop *L, |
| 4109 | ScalarEvolution &SE) { |
Reid Kleckner | e021f70 | 2017-11-13 18:43:11 +0000 | [diff] [blame] | 4110 | bool IsPosBECond = false; |
Jatin Bhateja | c61ade1 | 2017-11-13 16:43:24 +0000 | [diff] [blame] | 4111 | Value *BECond = nullptr; |
| 4112 | if (BasicBlock *Latch = L->getLoopLatch()) { |
| 4113 | BranchInst *BI = dyn_cast<BranchInst>(Latch->getTerminator()); |
Jatin Bhateja | a1da5e4 | 2017-11-26 02:01:01 +0000 | [diff] [blame] | 4114 | if (BI && BI->isConditional()) { |
Jatin Bhateja | 7410eea | 2017-11-26 15:08:41 +0000 | [diff] [blame] | 4115 | assert(BI->getSuccessor(0) != BI->getSuccessor(1) && |
| 4116 | "Both outgoing branches should not target the same header!"); |
Jatin Bhateja | c61ade1 | 2017-11-13 16:43:24 +0000 | [diff] [blame] | 4117 | BECond = BI->getCondition(); |
| 4118 | IsPosBECond = BI->getSuccessor(0) == L->getHeader(); |
| 4119 | } else { |
| 4120 | return S; |
| 4121 | } |
| 4122 | } |
| 4123 | SCEVBackedgeConditionFolder Rewriter(L, BECond, IsPosBECond, SE); |
| 4124 | return Rewriter.visit(S); |
| 4125 | } |
| 4126 | |
| 4127 | const SCEV *visitUnknown(const SCEVUnknown *Expr) { |
| 4128 | const SCEV *Result = Expr; |
| 4129 | bool InvariantF = SE.isLoopInvariant(Expr, L); |
| 4130 | |
| 4131 | if (!InvariantF) { |
| 4132 | Instruction *I = cast<Instruction>(Expr->getValue()); |
| 4133 | switch (I->getOpcode()) { |
| 4134 | case Instruction::Select: { |
| 4135 | SelectInst *SI = cast<SelectInst>(I); |
| 4136 | Optional<const SCEV *> Res = |
| 4137 | compareWithBackedgeCondition(SI->getCondition()); |
| 4138 | if (Res.hasValue()) { |
| 4139 | bool IsOne = cast<SCEVConstant>(Res.getValue())->getValue()->isOne(); |
| 4140 | Result = SE.getSCEV(IsOne ? SI->getTrueValue() : SI->getFalseValue()); |
| 4141 | } |
| 4142 | break; |
| 4143 | } |
| 4144 | default: { |
| 4145 | Optional<const SCEV *> Res = compareWithBackedgeCondition(I); |
| 4146 | if (Res.hasValue()) |
| 4147 | Result = Res.getValue(); |
| 4148 | break; |
| 4149 | } |
| 4150 | } |
| 4151 | } |
| 4152 | return Result; |
| 4153 | } |
| 4154 | |
| 4155 | private: |
| 4156 | explicit SCEVBackedgeConditionFolder(const Loop *L, Value *BECond, |
| 4157 | bool IsPosBECond, ScalarEvolution &SE) |
| 4158 | : SCEVRewriteVisitor(SE), L(L), BackedgeCond(BECond), |
| 4159 | IsPositiveBECond(IsPosBECond) {} |
| 4160 | |
| 4161 | Optional<const SCEV *> compareWithBackedgeCondition(Value *IC); |
| 4162 | |
| 4163 | const Loop *L; |
| 4164 | /// The backedge condition of the loop latch. |
| 4165 | Value *BackedgeCond = nullptr; |
| 4166 | /// Set to true if the backedge is taken when the branch condition is true. |
| 4167 | bool IsPositiveBECond; |
| 4168 | }; |
| 4169 | |
| 4170 | Optional<const SCEV *> |
| 4171 | SCEVBackedgeConditionFolder::compareWithBackedgeCondition(Value *IC) { |
| 4172 | |
| 4173 | // If the value matches the backedge condition of the loop latch, |
| 4174 | // return a constant evolution node (true or false) based on which |
| 4175 | // branch the backedge is taken on. |
| 4176 | if (BackedgeCond == IC) |
| 4177 | return IsPositiveBECond ? SE.getOne(Type::getInt1Ty(SE.getContext())) |
| 4178 | : SE.getZero(Type::getInt1Ty(SE.getContext())); |
| 4179 | return None; |
| 4180 | } |
| 4181 | |
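/// Rewrite a SCEV expression for loop L by shifting each affine add
/// recurrence of L back by one iteration, i.e. {S,+,X}<L> becomes
/// {S-X,+,X}<L> (computed as the expression minus its step). This is used
/// below by createAddRecFromPHI to recognize PHIs whose incoming value is
/// the previous iteration's value of another induction variable.
/// Non-invariant SCEVUnknowns or recurrences of other loops make the
/// rewrite invalid.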
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4182 | class SCEVShiftRewriter : public SCEVRewriteVisitor<SCEVShiftRewriter> { |
| 4183 | public: |
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 4184 | static const SCEV *rewrite(const SCEV *S, const Loop *L, |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4185 | ScalarEvolution &SE) { |
| 4186 | SCEVShiftRewriter Rewriter(L, SE); |
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 4187 | const SCEV *Result = Rewriter.visit(S); |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4188 | return Rewriter.isValid() ? Result : SE.getCouldNotCompute(); |
| 4189 | } |
| 4190 | |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4191 | const SCEV *visitUnknown(const SCEVUnknown *Expr) { |
| 4192 | // Only allow SCEVUnknowns that are invariant in this loop. |
Max Kazantsev | 627ad0f | 2017-05-18 08:26:41 +0000 | [diff] [blame] | 4193 | if (!SE.isLoopInvariant(Expr, L)) |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4194 | Valid = false; |
| 4195 | return Expr; |
| 4196 | } |
| 4197 | |
| 4198 | const SCEV *visitAddRecExpr(const SCEVAddRecExpr *Expr) { |
| 4199 | if (Expr->getLoop() == L && Expr->isAffine()) |
| 4200 | return SE.getMinusSCEV(Expr, Expr->getStepRecurrence(SE)); |
| 4201 | Valid = false; |
| 4202 | return Expr; |
| 4203 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4204 | |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4205 | bool isValid() { return Valid; } |
| 4206 | |
| 4207 | private: |
Jatin Bhateja | 7410eea | 2017-11-26 15:08:41 +0000 | [diff] [blame] | 4208 | explicit SCEVShiftRewriter(const Loop *L, ScalarEvolution &SE) |
| 4209 | : SCEVRewriteVisitor(SE), L(L) {} |
| 4210 | |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4211 | const Loop *L; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4212 | bool Valid = true; |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4213 | }; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4214 | |
Benjamin Kramer | 83709b1 | 2015-11-16 09:01:28 +0000 | [diff] [blame] | 4215 | } // end anonymous namespace |
Silviu Baranga | f91c807 | 2015-10-30 15:02:28 +0000 | [diff] [blame] | 4216 | |
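/// Try to prove NSW/NUW on the affine AddRec \p AR via range analysis: if
/// the signed (resp. unsigned) range of the recurrence is contained in the
/// region where adding the step is guaranteed not to wrap, the corresponding
/// flag is added to the result.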
Sanjoy Das | 724f5cf | 2016-03-03 18:31:29 +0000 | [diff] [blame] | 4217 | SCEV::NoWrapFlags |
| 4218 | ScalarEvolution::proveNoWrapViaConstantRanges(const SCEVAddRecExpr *AR) { |
| 4219 | if (!AR->isAffine()) |
| 4220 | return SCEV::FlagAnyWrap; |
| 4221 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4222 | using OBO = OverflowingBinaryOperator; |
| 4223 | |
Sanjoy Das | 724f5cf | 2016-03-03 18:31:29 +0000 | [diff] [blame] | 4224 | SCEV::NoWrapFlags Result = SCEV::FlagAnyWrap; |
| 4225 | |
| 4226 | if (!AR->hasNoSignedWrap()) { |
| 4227 | ConstantRange AddRecRange = getSignedRange(AR); |
| 4228 | ConstantRange IncRange = getSignedRange(AR->getStepRecurrence(*this)); |
| 4229 | |
| 4230 | auto NSWRegion = ConstantRange::makeGuaranteedNoWrapRegion( |
| 4231 | Instruction::Add, IncRange, OBO::NoSignedWrap); |
| 4232 | if (NSWRegion.contains(AddRecRange)) |
| 4233 | Result = ScalarEvolution::setFlags(Result, SCEV::FlagNSW); |
| 4234 | } |
| 4235 | |
| 4236 | if (!AR->hasNoUnsignedWrap()) { |
| 4237 | ConstantRange AddRecRange = getUnsignedRange(AR); |
| 4238 | ConstantRange IncRange = getUnsignedRange(AR->getStepRecurrence(*this)); |
| 4239 | |
| 4240 | auto NUWRegion = ConstantRange::makeGuaranteedNoWrapRegion( |
| 4241 | Instruction::Add, IncRange, OBO::NoUnsignedWrap); |
| 4242 | if (NUWRegion.contains(AddRecRange)) |
| 4243 | Result = ScalarEvolution::setFlags(Result, SCEV::FlagNUW); |
| 4244 | } |
| 4245 | |
| 4246 | return Result; |
| 4247 | } |
| 4248 | |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4249 | namespace { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4250 | |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4251 | /// Represents an abstract binary operation. This may exist as a |
| 4252 | /// normal instruction or constant expression, or may have been |
| 4253 | /// derived from an expression tree. |
| 4254 | struct BinaryOp { |
| 4255 | unsigned Opcode; |
| 4256 | Value *LHS; |
| 4257 | Value *RHS; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4258 | bool IsNSW = false; |
| 4259 | bool IsNUW = false; |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4260 | |
| 4261 | /// Op is set if this BinaryOp corresponds to a concrete LLVM instruction or |
| 4262 | /// constant expression. |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4263 | Operator *Op = nullptr; |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4264 | |
| 4265 | explicit BinaryOp(Operator *Op) |
| 4266 | : Opcode(Op->getOpcode()), LHS(Op->getOperand(0)), RHS(Op->getOperand(1)), |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4267 | Op(Op) { |
Sanjoy Das | e12c0e5 | 2016-03-31 05:14:26 +0000 | [diff] [blame] | 4268 | if (auto *OBO = dyn_cast<OverflowingBinaryOperator>(Op)) { |
| 4269 | IsNSW = OBO->hasNoSignedWrap(); |
| 4270 | IsNUW = OBO->hasNoUnsignedWrap(); |
| 4271 | } |
| 4272 | } |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4273 | |
Sanjoy Das | e12c0e5 | 2016-03-31 05:14:26 +0000 | [diff] [blame] | 4274 | explicit BinaryOp(unsigned Opcode, Value *LHS, Value *RHS, bool IsNSW = false, |
| 4275 | bool IsNUW = false) |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4276 | : Opcode(Opcode), LHS(LHS), RHS(RHS), IsNSW(IsNSW), IsNUW(IsNUW) {} |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4277 | }; |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4278 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4279 | } // end anonymous namespace |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4280 | |
| 4281 | /// Try to map \p V into a BinaryOp, and return \c None on failure. |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4282 | static Optional<BinaryOp> MatchBinaryOp(Value *V, DominatorTree &DT) { |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4283 | auto *Op = dyn_cast<Operator>(V); |
| 4284 | if (!Op) |
| 4285 | return None; |
| 4286 | |
| 4287 | // Implementation detail: all the cleverness here should happen without |
| 4288 | // creating new SCEV expressions -- our caller knows tricks to avoid creating |
| 4289 | // SCEV expressions when possible, and we should not break that. |
| 4290 | |
| 4291 | switch (Op->getOpcode()) { |
| 4292 | case Instruction::Add: |
| 4293 | case Instruction::Sub: |
| 4294 | case Instruction::Mul: |
| 4295 | case Instruction::UDiv: |
Alexandre Isoard | 405728f | 2017-09-01 14:59:59 +0000 | [diff] [blame] | 4296 | case Instruction::URem: |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4297 | case Instruction::And: |
| 4298 | case Instruction::Or: |
| 4299 | case Instruction::AShr: |
| 4300 | case Instruction::Shl: |
| 4301 | return BinaryOp(Op); |
| 4302 | |
| 4303 | case Instruction::Xor: |
| 4304 | if (auto *RHSC = dyn_cast<ConstantInt>(Op->getOperand(1))) |
Craig Topper | bcfd2d1 | 2017-04-20 16:56:25 +0000 | [diff] [blame] | 4305 | // If the RHS of the xor is a signmask, then this is just an add. |
| 4306 | // Instcombine turns add of signmask into xor as a strength reduction step. |
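// For example, (xor i32 %x, -2147483648) is handled exactly like
// (add i32 %x, -2147483648), since flipping the sign bit is the same as
// adding the sign mask modulo 2^32.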
| 4307 | if (RHSC->getValue().isSignMask()) |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4308 | return BinaryOp(Instruction::Add, Op->getOperand(0), Op->getOperand(1)); |
| 4309 | return BinaryOp(Op); |
| 4310 | |
| 4311 | case Instruction::LShr: |
| 4312 | // Turn a logical shift right by a constant into an unsigned divide. |
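// For example, (lshr i32 %x, 4) is treated as (udiv i32 %x, 16).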
| 4313 | if (ConstantInt *SA = dyn_cast<ConstantInt>(Op->getOperand(1))) { |
| 4314 | uint32_t BitWidth = cast<IntegerType>(Op->getType())->getBitWidth(); |
| 4315 | |
| 4316 | // If the shift count is not less than the bitwidth, the result of |
| 4317 | // the shift is undefined. Don't try to analyze it, because the |
| 4318 | // resolution chosen here may differ from the resolution chosen in |
| 4319 | // other parts of the compiler. |
| 4320 | if (SA->getValue().ult(BitWidth)) { |
| 4321 | Constant *X = |
| 4322 | ConstantInt::get(SA->getContext(), |
| 4323 | APInt::getOneBitSet(BitWidth, SA->getZExtValue())); |
| 4324 | return BinaryOp(Instruction::UDiv, Op->getOperand(0), X); |
| 4325 | } |
| 4326 | } |
| 4327 | return BinaryOp(Op); |
| 4328 | |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4329 | case Instruction::ExtractValue: { |
| 4330 | auto *EVI = cast<ExtractValueInst>(Op); |
| 4331 | if (EVI->getNumIndices() != 1 || EVI->getIndices()[0] != 0) |
| 4332 | break; |
| 4333 | |
| 4334 | auto *CI = dyn_cast<CallInst>(EVI->getAggregateOperand()); |
| 4335 | if (!CI) |
| 4336 | break; |
| 4337 | |
| 4338 | if (auto *F = CI->getCalledFunction()) |
| 4339 | switch (F->getIntrinsicID()) { |
| 4340 | case Intrinsic::sadd_with_overflow: |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4341 | case Intrinsic::uadd_with_overflow: |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4342 | if (!isOverflowIntrinsicNoWrap(cast<IntrinsicInst>(CI), DT)) |
| 4343 | return BinaryOp(Instruction::Add, CI->getArgOperand(0), |
| 4344 | CI->getArgOperand(1)); |
| 4345 | |
| 4346 | // Now that we know that all uses of the arithmetic-result component of |
| 4347 | // CI are guarded by the overflow check, we can go ahead and pretend |
| 4348 | // that the arithmetic is non-overflowing. |
| 4349 | if (F->getIntrinsicID() == Intrinsic::sadd_with_overflow) |
| 4350 | return BinaryOp(Instruction::Add, CI->getArgOperand(0), |
| 4351 | CI->getArgOperand(1), /* IsNSW = */ true, |
| 4352 | /* IsNUW = */ false); |
| 4353 | else |
| 4354 | return BinaryOp(Instruction::Add, CI->getArgOperand(0), |
| 4355 | CI->getArgOperand(1), /* IsNSW = */ false, |
| 4356 | /* IsNUW*/ true); |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4357 | case Intrinsic::ssub_with_overflow: |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4358 | case Intrinsic::usub_with_overflow: |
Amara Emerson | 56dca4e3 | 2017-08-04 20:19:46 +0000 | [diff] [blame] | 4359 | if (!isOverflowIntrinsicNoWrap(cast<IntrinsicInst>(CI), DT)) |
| 4360 | return BinaryOp(Instruction::Sub, CI->getArgOperand(0), |
| 4361 | CI->getArgOperand(1)); |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4362 | |
Amara Emerson | 56dca4e3 | 2017-08-04 20:19:46 +0000 | [diff] [blame] | 4363 | // The same reasoning as sadd/uadd above. |
| 4364 | if (F->getIntrinsicID() == Intrinsic::ssub_with_overflow) |
| 4365 | return BinaryOp(Instruction::Sub, CI->getArgOperand(0), |
| 4366 | CI->getArgOperand(1), /* IsNSW = */ true, |
| 4367 | /* IsNUW = */ false); |
| 4368 | else |
| 4369 | return BinaryOp(Instruction::Sub, CI->getArgOperand(0), |
| 4370 | CI->getArgOperand(1), /* IsNSW = */ false, |
| 4371 | /* IsNUW = */ true); |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4372 | case Intrinsic::smul_with_overflow: |
| 4373 | case Intrinsic::umul_with_overflow: |
| 4374 | return BinaryOp(Instruction::Mul, CI->getArgOperand(0), |
| 4375 | CI->getArgOperand(1)); |
| 4376 | default: |
| 4377 | break; |
| 4378 | } |
Adrian Prantl | 0e6694d | 2017-12-19 22:05:25 +0000 | [diff] [blame] | 4379 | break; |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 4380 | } |
| 4381 | |
Sanjoy Das | 118d919 | 2016-03-31 05:14:22 +0000 | [diff] [blame] | 4382 | default: |
| 4383 | break; |
| 4384 | } |
| 4385 | |
| 4386 | return None; |
| 4387 | } |
| 4388 | |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4389 | /// Helper function to createAddRecFromPHIWithCasts. We have a phi |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4390 | /// node whose symbolic (unknown) SCEV is \p SymbolicPHI, which is updated via |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4391 | /// the loop backedge by a SCEVAddExpr, possibly also with a few casts on the |
| 4392 | /// way. This function checks if \p Op, an operand of this SCEVAddExpr, |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4393 | /// follows one of the following patterns: |
| 4394 | /// Op == (SExt ix (Trunc iy (%SymbolicPHI) to ix) to iy) |
| 4395 | /// Op == (ZExt ix (Trunc iy (%SymbolicPHI) to ix) to iy) |
| 4396 | /// If the SCEV expression of \p Op conforms with one of the expected patterns |
| 4397 | /// we return the type of the truncation operation, and indicate whether the |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4398 | /// truncated type should be treated as signed/unsigned by setting |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4399 | /// \p Signed to true/false, respectively. |
| 4400 | static Type *isSimpleCastedPHI(const SCEV *Op, const SCEVUnknown *SymbolicPHI, |
| 4401 | bool &Signed, ScalarEvolution &SE) { |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4402 | // The case where Op == SymbolicPHI (that is, with no type conversions on |
| 4403 | // the way) is handled by the regular add recurrence creating logic and |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4404 | // would have already been triggered in createAddRecFromPHI. Reaching it here |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4405 | // means that createAddRecFromPHI had failed for this PHI before (e.g., |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4406 | // because one of the other operands of the SCEVAddExpr updating this PHI is |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4407 | // not invariant). |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4408 | // |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4409 | // Here we look for the case where Op = (ext(trunc(SymbolicPHI))), and in |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4410 | // this case predicates that allow us to prove that Op == SymbolicPHI will |
| 4411 | // be added. |
| 4412 | if (Op == SymbolicPHI) |
| 4413 | return nullptr; |
| 4414 | |
| 4415 | unsigned SourceBits = SE.getTypeSizeInBits(SymbolicPHI->getType()); |
| 4416 | unsigned NewBits = SE.getTypeSizeInBits(Op->getType()); |
| 4417 | if (SourceBits != NewBits) |
| 4418 | return nullptr; |
| 4419 | |
| 4420 | const SCEVSignExtendExpr *SExt = dyn_cast<SCEVSignExtendExpr>(Op); |
| 4421 | const SCEVZeroExtendExpr *ZExt = dyn_cast<SCEVZeroExtendExpr>(Op); |
| 4422 | if (!SExt && !ZExt) |
| 4423 | return nullptr; |
| 4424 | const SCEVTruncateExpr *Trunc = |
| 4425 | SExt ? dyn_cast<SCEVTruncateExpr>(SExt->getOperand()) |
| 4426 | : dyn_cast<SCEVTruncateExpr>(ZExt->getOperand()); |
| 4427 | if (!Trunc) |
| 4428 | return nullptr; |
| 4429 | const SCEV *X = Trunc->getOperand(); |
| 4430 | if (X != SymbolicPHI) |
| 4431 | return nullptr; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4432 | Signed = SExt != nullptr; |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4433 | return Trunc->getType(); |
| 4434 | } |
| 4435 | |
| 4436 | static const Loop *isIntegerLoopHeaderPHI(const PHINode *PN, LoopInfo &LI) { |
| 4437 | if (!PN->getType()->isIntegerTy()) |
| 4438 | return nullptr; |
| 4439 | const Loop *L = LI.getLoopFor(PN->getParent()); |
| 4440 | if (!L || L->getHeader() != PN->getParent()) |
| 4441 | return nullptr; |
| 4442 | return L; |
| 4443 | } |
| 4444 | |
| 4445 | // Analyze \p SymbolicPHI, a SCEV expression of a phi node, and check if the |
| 4446 | // computation that updates the phi follows the following pattern: |
| 4447 | // (SExt/ZExt ix (Trunc iy (%SymbolicPHI) to ix) to iy) + InvariantAccum |
| 4448 | // which correspond to a phi->trunc->sext/zext->add->phi update chain. |
| 4449 | // If so, try to see if it can be rewritten as an AddRecExpr under some |
| 4450 | // Predicates. If successful, return them as a pair. Also cache the results |
| 4451 | // of the analysis. |
| 4452 | // |
| 4453 | // Example usage scenario: |
| 4454 | // Say the Rewriter is called for the following SCEV: |
| 4455 | // 8 * ((sext i32 (trunc i64 %X to i32) to i64) + %Step) |
| 4456 | // where: |
| 4457 | // %X = phi i64 (%Start, %BEValue) |
| 4458 | // It will visitMul->visitAdd->visitSExt->visitTrunc->visitUnknown(%X), |
| 4459 | // and call this function with %SymbolicPHI = %X. |
| 4460 | // |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4461 | // The analysis will find that the value coming around the backedge has |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4462 | // the following SCEV: |
| 4463 | // BEValue = ((sext i32 (trunc i64 %X to i32) to i64) + %Step) |
| 4464 | // Upon concluding that this matches the desired pattern, the function |
| 4465 | // will return the pair {NewAddRec, SmallPredsVec} where: |
| 4466 | // NewAddRec = {%Start,+,%Step} |
| 4467 | // SmallPredsVec = {P1, P2, P3} as follows: |
| 4468 | // P1(WrapPred): AR: {trunc(%Start),+,(trunc %Step)}<nsw> Flags: <nssw> |
| 4469 | // P2(EqualPred): %Start == (sext i32 (trunc i64 %Start to i32) to i64) |
| 4470 | // P3(EqualPred): %Step == (sext i32 (trunc i64 %Step to i32) to i64) |
| 4471 | // The returned pair means that SymbolicPHI can be rewritten into NewAddRec |
| 4472 | // under the predicates {P1,P2,P3}. |
| 4473 | // This predicated rewrite will be cached in PredicatedSCEVRewrites: |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4474 | // PredicatedSCEVRewrites[{%X,L}] = {NewAddRec, {P1,P2,P3)} |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4475 | // |
| 4476 | // TODO's: |
| 4477 | // |
| 4478 | // 1) Extend the Induction descriptor to also support inductions that involve |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4479 | // casts: When needed (namely, when we are called in the context of the |
| 4480 | // vectorizer induction analysis), a Set of cast instructions will be |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4481 | // populated by this method, and provided back to isInductionPHI. This is |
| 4482 | // needed to allow the vectorizer to properly record them to be ignored by |
| 4483 | // the cost model and to avoid vectorizing them (otherwise these casts, |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4484 | // which are redundant under the runtime overflow checks, will be |
| 4485 | // vectorized, which can be costly). |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4486 | // |
| 4487 | // 2) Support additional induction/PHISCEV patterns: We also want to support |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4488 | // inductions where the sext-trunc / zext-trunc operations (partly) occur |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4489 | // after the induction update operation (the induction increment): |
| 4490 | // |
| 4491 | // (Trunc iy (SExt/ZExt ix (%SymbolicPHI + InvariantAccum) to iy) to ix) |
| 4492 | // which correspond to a phi->add->trunc->sext/zext->phi update chain. |
| 4493 | // |
| 4494 | // (Trunc iy ((SExt/ZExt ix (%SymbolicPhi) to iy) + InvariantAccum) to ix) |
| 4495 | // which correspond to a phi->trunc->add->sext/zext->phi update chain. |
| 4496 | // |
| 4497 | // 3) Outline common code with createAddRecFromPHI to avoid duplication. |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4498 | Optional<std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>>> |
| 4499 | ScalarEvolution::createAddRecFromPHIWithCastsImpl(const SCEVUnknown *SymbolicPHI) { |
| 4500 | SmallVector<const SCEVPredicate *, 3> Predicates; |
| 4501 | |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4502 | // *** Part1: Analyze if we have a phi-with-cast pattern for which we can |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4503 | // return an AddRec expression under some predicate. |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4504 | |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4505 | auto *PN = cast<PHINode>(SymbolicPHI->getValue()); |
| 4506 | const Loop *L = isIntegerLoopHeaderPHI(PN, LI); |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4507 | assert(L && "Expecting an integer loop header phi"); |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4508 | |
| 4509 | // The loop may have multiple entrances or multiple exits; we can analyze |
| 4510 | // this phi as an addrec if it has a unique entry value and a unique |
| 4511 | // backedge value. |
| 4512 | Value *BEValueV = nullptr, *StartValueV = nullptr; |
| 4513 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { |
| 4514 | Value *V = PN->getIncomingValue(i); |
| 4515 | if (L->contains(PN->getIncomingBlock(i))) { |
| 4516 | if (!BEValueV) { |
| 4517 | BEValueV = V; |
| 4518 | } else if (BEValueV != V) { |
| 4519 | BEValueV = nullptr; |
| 4520 | break; |
| 4521 | } |
| 4522 | } else if (!StartValueV) { |
| 4523 | StartValueV = V; |
| 4524 | } else if (StartValueV != V) { |
| 4525 | StartValueV = nullptr; |
| 4526 | break; |
| 4527 | } |
| 4528 | } |
| 4529 | if (!BEValueV || !StartValueV) |
| 4530 | return None; |
| 4531 | |
| 4532 | const SCEV *BEValue = getSCEV(BEValueV); |
| 4533 | |
| 4534 | // If the value coming around the backedge is an add with the symbolic |
| 4535 | // value we just inserted, possibly with casts that we can ignore under |
| 4536 | // an appropriate runtime guard, then we found a simple induction variable! |
| 4537 | const auto *Add = dyn_cast<SCEVAddExpr>(BEValue); |
| 4538 | if (!Add) |
| 4539 | return None; |
| 4540 | |
| 4541 | // If there is a single occurrence of the symbolic value, possibly |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4542 | // casted, replace it with a recurrence. |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4543 | unsigned FoundIndex = Add->getNumOperands(); |
| 4544 | Type *TruncTy = nullptr; |
| 4545 | bool Signed; |
| 4546 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4547 | if ((TruncTy = |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4548 | isSimpleCastedPHI(Add->getOperand(i), SymbolicPHI, Signed, *this))) |
| 4549 | if (FoundIndex == e) { |
| 4550 | FoundIndex = i; |
| 4551 | break; |
| 4552 | } |
| 4553 | |
| 4554 | if (FoundIndex == Add->getNumOperands()) |
| 4555 | return None; |
| 4556 | |
| 4557 | // Create an add with everything but the specified operand. |
| 4558 | SmallVector<const SCEV *, 8> Ops; |
| 4559 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
| 4560 | if (i != FoundIndex) |
| 4561 | Ops.push_back(Add->getOperand(i)); |
| 4562 | const SCEV *Accum = getAddExpr(Ops); |
| 4563 | |
| 4564 | // The runtime checks will not be valid if the step amount is |
| 4565 | // varying inside the loop. |
| 4566 | if (!isLoopInvariant(Accum, L)) |
| 4567 | return None; |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4568 | |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4569 | // *** Part2: Create the predicates |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4570 | |
| 4571 | // Analysis was successful: we have a phi-with-cast pattern for which we |
| 4572 | // can return an AddRec expression under the following predicates: |
| 4573 | // |
| 4574 | // P1: A Wrap predicate that guarantees that Trunc(Start) + i*Trunc(Accum) |
| 4575 | // fits within the truncated type (does not overflow) for i = 0 to n-1. |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4576 | // P2: An Equal predicate that guarantees that |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4577 | // Start = (Ext ix (Trunc iy (Start) to ix) to iy) |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4578 | // P3: An Equal predicate that guarantees that |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4579 | // Accum = (Ext ix (Trunc iy (Accum) to ix) to iy) |
| 4580 | // |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4581 | // As we next prove, the above predicates guarantee that: |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4582 | // Start + i*Accum = (Ext ix (Trunc iy ( Start + i*Accum ) to ix) to iy) |
| 4583 | // |
| 4584 | // |
| 4585 | // More formally, we want to prove that: |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4586 | // Expr(i+1) = Start + (i+1) * Accum |
| 4587 | // = (Ext ix (Trunc iy (Expr(i)) to ix) to iy) + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4588 | // |
| 4589 | // Given that: |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4590 | // 1) Expr(0) = Start |
| 4591 | // 2) Expr(1) = Start + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4592 | // = (Ext ix (Trunc iy (Start) to ix) to iy) + Accum :: from P2 |
| 4593 | // 3) Induction hypothesis (step i): |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4594 | // Expr(i) = (Ext ix (Trunc iy (Expr(i-1)) to ix) to iy) + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4595 | // |
| 4596 | // Proof: |
| 4597 | // Expr(i+1) = |
| 4598 | // = Start + (i+1)*Accum |
| 4599 | // = (Start + i*Accum) + Accum |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4600 | // = Expr(i) + Accum |
| 4601 | // = (Ext ix (Trunc iy (Expr(i-1)) to ix) to iy) + Accum + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4602 | // :: from step i |
| 4603 | // |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4604 | // = (Ext ix (Trunc iy (Start + (i-1)*Accum) to ix) to iy) + Accum + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4605 | // |
| 4606 | // = (Ext ix (Trunc iy (Start + (i-1)*Accum) to ix) to iy) |
| 4607 | // + (Ext ix (Trunc iy (Accum) to ix) to iy) |
| 4608 | // + Accum :: from P3 |
| 4609 | // |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4610 | // = (Ext ix (Trunc iy ((Start + (i-1)*Accum) + Accum) to ix) to iy) |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4611 | // + Accum :: from P1: Ext(x)+Ext(y)=>Ext(x+y) |
| 4612 | // |
| 4613 | // = (Ext ix (Trunc iy (Start + i*Accum) to ix) to iy) + Accum |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4614 | // = (Ext ix (Trunc iy (Expr(i)) to ix) to iy) + Accum |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4615 | // |
| 4616 | // By induction, the same applies to all iterations 1<=i<n: |
| 4617 | // |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4618 | |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4619 | // Create a truncated addrec for which we will add a no overflow check (P1). |
| 4620 | const SCEV *StartVal = getSCEV(StartValueV); |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4621 | const SCEV *PHISCEV = |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4622 | getAddRecExpr(getTruncateExpr(StartVal, TruncTy), |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4623 | getTruncateExpr(Accum, TruncTy), L, SCEV::FlagAnyWrap); |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4624 | |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4625 | // PHISCEV can be either a SCEVConstant or a SCEVAddRecExpr. |
| 4626 | // ex: If truncated Accum is 0 and StartVal is a constant, then PHISCEV |
| 4627 | // will be constant. |
| 4628 | // |
| 4629 | // If PHISCEV is a constant, then P1 degenerates into P2 or P3, so we don't |
| 4630 | // add P1. |
| 4631 | if (const auto *AR = dyn_cast<SCEVAddRecExpr>(PHISCEV)) { |
| 4632 | SCEVWrapPredicate::IncrementWrapFlags AddedFlags = |
| 4633 | Signed ? SCEVWrapPredicate::IncrementNSSW |
| 4634 | : SCEVWrapPredicate::IncrementNUSW; |
| 4635 | const SCEVPredicate *AddRecPred = getWrapPredicate(AR, AddedFlags); |
| 4636 | Predicates.push_back(AddRecPred); |
Daniel Neilson | 5acfd1d | 2017-10-11 19:05:14 +0000 | [diff] [blame] | 4637 | } |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4638 | |
| 4639 | // Create the Equal Predicates P2,P3: |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4640 | |
| 4641 | // It is possible that the predicates P2 and/or P3 are computable at |
| 4642 | // compile time due to StartVal and/or Accum being constants. |
| 4643 | // If either one is, then we can check that now and escape if either P2 |
| 4644 | // or P3 is false. |
| 4645 | |
| 4646 | // Construct the extended SCEV: (Ext ix (Trunc iy (Expr) to ix) to iy) |
| 4647 | // for each of StartVal and Accum |
Dorit Nuzman | 5809e70 | 2017-12-10 11:13:35 +0000 | [diff] [blame] | 4648 | auto getExtendedExpr = [&](const SCEV *Expr, |
| 4649 | bool CreateSignExtend) -> const SCEV * { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 4650 | assert(isLoopInvariant(Expr, L) && "Expr is expected to be invariant"); |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4651 | const SCEV *TruncatedExpr = getTruncateExpr(Expr, TruncTy); |
| 4652 | const SCEV *ExtendedExpr = |
Dorit Nuzman | 5809e70 | 2017-12-10 11:13:35 +0000 | [diff] [blame] | 4653 | CreateSignExtend ? getSignExtendExpr(TruncatedExpr, Expr->getType()) |
| 4654 | : getZeroExtendExpr(TruncatedExpr, Expr->getType()); |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4655 | return ExtendedExpr; |
| 4656 | }; |
| 4657 | |
| 4658 | // Given: |
| 4659 | // ExtendedExpr = (Ext ix (Trunc iy (Expr) to ix) to iy) |
Dorit Nuzman | 5809e70 | 2017-12-10 11:13:35 +0000 | [diff] [blame] | 4660 | // = getExtendedExpr(Expr) |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4661 | // Determine whether the predicate P: Expr == ExtendedExpr |
| 4662 | // is known to be false at compile time |
| 4663 | auto PredIsKnownFalse = [&](const SCEV *Expr, |
| 4664 | const SCEV *ExtendedExpr) -> bool { |
| 4665 | return Expr != ExtendedExpr && |
| 4666 | isKnownPredicate(ICmpInst::ICMP_NE, Expr, ExtendedExpr); |
| 4667 | }; |
| 4668 | |
Dorit Nuzman | 5809e70 | 2017-12-10 11:13:35 +0000 | [diff] [blame] | 4669 | const SCEV *StartExtended = getExtendedExpr(StartVal, Signed); |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4670 | if (PredIsKnownFalse(StartVal, StartExtended)) { |
| 4671 | DEBUG(dbgs() << "P2 is compile-time false\n";); |
| 4672 | return None; |
| 4673 | } |
| 4674 | |
Dorit Nuzman | 5809e70 | 2017-12-10 11:13:35 +0000 | [diff] [blame] | 4675 | // The Step is always Signed (because the overflow checks are either |
| 4676 | // NSSW or NUSW) |
| 4677 | const SCEV *AccumExtended = getExtendedExpr(Accum, /*CreateSignExtend=*/true); |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4678 | if (PredIsKnownFalse(Accum, AccumExtended)) { |
| 4679 | DEBUG(dbgs() << "P3 is compile-time false\n";); |
| 4680 | return None; |
| 4681 | } |
| 4682 | |
| 4683 | auto AppendPredicate = [&](const SCEV *Expr, |
| 4684 | const SCEV *ExtendedExpr) -> void { |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4685 | if (Expr != ExtendedExpr && |
| 4686 | !isKnownPredicate(ICmpInst::ICMP_EQ, Expr, ExtendedExpr)) { |
| 4687 | const SCEVPredicate *Pred = getEqualPredicate(Expr, ExtendedExpr); |
| 4688 | DEBUG(dbgs() << "Added Predicate: " << *Pred); |
| 4689 | Predicates.push_back(Pred); |
| 4690 | } |
| 4691 | }; |
Daniel Neilson | 3f0e4ad | 2017-09-05 19:54:03 +0000 | [diff] [blame] | 4692 | |
| 4693 | AppendPredicate(StartVal, StartExtended); |
| 4694 | AppendPredicate(Accum, AccumExtended); |
| 4695 | |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4696 | // *** Part3: Predicates are ready. Now go ahead and create the new addrec in |
| 4697 | // which the casts had been folded away. The caller can rewrite SymbolicPHI |
| 4698 | // into NewAR if it will also add the runtime overflow checks specified in |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4699 | // Predicates. |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4700 | auto *NewAR = getAddRecExpr(StartVal, Accum, L, SCEV::FlagAnyWrap); |
| 4701 | |
| 4702 | std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>> PredRewrite = |
| 4703 | std::make_pair(NewAR, Predicates); |
| 4704 | // Remember the result of the analysis for this SCEV at this location. |
| 4705 | PredicatedSCEVRewrites[{SymbolicPHI, L}] = PredRewrite; |
| 4706 | return PredRewrite; |
| 4707 | } |
| 4708 | |
| 4709 | Optional<std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>>> |
| 4710 | ScalarEvolution::createAddRecFromPHIWithCasts(const SCEVUnknown *SymbolicPHI) { |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4711 | auto *PN = cast<PHINode>(SymbolicPHI->getValue()); |
| 4712 | const Loop *L = isIntegerLoopHeaderPHI(PN, LI); |
| 4713 | if (!L) |
| 4714 | return None; |
| 4715 | |
| 4716 | // Check to see if we already analyzed this PHI. |
| 4717 | auto I = PredicatedSCEVRewrites.find({SymbolicPHI, L}); |
| 4718 | if (I != PredicatedSCEVRewrites.end()) { |
| 4719 | std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>> Rewrite = |
| 4720 | I->second; |
| 4721 | // Analysis was done before and failed to create an AddRec: |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 4722 | if (Rewrite.first == SymbolicPHI) |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 4723 | return None; |
| 4724 | // Analysis was done before and succeeded to create an AddRec under |
| 4725 | // a predicate: |
| 4726 | assert(isa<SCEVAddRecExpr>(Rewrite.first) && "Expected an AddRec"); |
| 4727 | assert(!(Rewrite.second).empty() && "Expected to find Predicates"); |
| 4728 | return Rewrite; |
| 4729 | } |
| 4730 | |
| 4731 | Optional<std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>>> |
| 4732 | Rewrite = createAddRecFromPHIWithCastsImpl(SymbolicPHI); |
| 4733 | |
| 4734 | // Record in the cache that the analysis failed |
| 4735 | if (!Rewrite) { |
| 4736 | SmallVector<const SCEVPredicate *, 3> Predicates; |
| 4737 | PredicatedSCEVRewrites[{SymbolicPHI, L}] = {SymbolicPHI, Predicates}; |
| 4738 | return None; |
| 4739 | } |
| 4740 | |
| 4741 | return Rewrite; |
| 4742 | } |
| 4743 | |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4744 | // FIXME: This utility is currently required because the Rewriter currently |
| 4745 | // does not rewrite this expression: |
| 4746 | // {0, +, (sext ix (trunc iy to ix) to iy)} |
| 4747 | // into {0, +, %step}, |
| 4748 | // even when the following Equal predicate exists: |
| 4749 | // "%step == (sext ix (trunc iy to ix) to iy)". |
| 4750 | bool PredicatedScalarEvolution::areAddRecsEqualWithPreds( |
| 4751 | const SCEVAddRecExpr *AR1, const SCEVAddRecExpr *AR2) const { |
| 4752 | if (AR1 == AR2) |
| 4753 | return true; |
| 4754 | |
| 4755 | auto areExprsEqual = [&](const SCEV *Expr1, const SCEV *Expr2) -> bool { |
| 4756 | if (Expr1 != Expr2 && !Preds.implies(SE.getEqualPredicate(Expr1, Expr2)) && |
| 4757 | !Preds.implies(SE.getEqualPredicate(Expr2, Expr1))) |
| 4758 | return false; |
| 4759 | return true; |
| 4760 | }; |
| 4761 | |
| 4762 | if (!areExprsEqual(AR1->getStart(), AR2->getStart()) || |
| 4763 | !areExprsEqual(AR1->getStepRecurrence(SE), AR2->getStepRecurrence(SE))) |
| 4764 | return false; |
| 4765 | return true; |
| 4766 | } |
| 4767 | |
Michael Zolotukhin | 37162ad | 2017-05-03 23:53:38 +0000 | [diff] [blame] | 4768 | /// A helper function for createAddRecFromPHI to handle simple cases. |
| 4769 | /// |
| 4770 | /// This function tries to find an AddRec expression for the simplest (yet most |
| 4771 | /// common) cases: PN = PHI(Start, OP(Self, LoopInvariant)). |
| 4772 | /// If it fails, createAddRecFromPHI will use a more general, but slow, |
| 4773 | /// technique for finding the AddRec expression. |
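/// For example, for the canonical loop-header PHI
///   %iv = phi i64 [ %start, %preheader ], [ %iv.next, %latch ]
///   %iv.next = add nsw i64 %iv, %step
/// with a loop-invariant %step, this returns {%start,+,%step}<nsw> for the
/// enclosing loop (block names here are illustrative only).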
| 4774 | const SCEV *ScalarEvolution::createSimpleAffineAddRec(PHINode *PN, |
| 4775 | Value *BEValueV, |
| 4776 | Value *StartValueV) { |
| 4777 | const Loop *L = LI.getLoopFor(PN->getParent()); |
| 4778 | assert(L && L->getHeader() == PN->getParent()); |
| 4779 | assert(BEValueV && StartValueV); |
| 4780 | |
| 4781 | auto BO = MatchBinaryOp(BEValueV, DT); |
| 4782 | if (!BO) |
| 4783 | return nullptr; |
| 4784 | |
| 4785 | if (BO->Opcode != Instruction::Add) |
| 4786 | return nullptr; |
| 4787 | |
| 4788 | const SCEV *Accum = nullptr; |
| 4789 | if (BO->LHS == PN && L->isLoopInvariant(BO->RHS)) |
| 4790 | Accum = getSCEV(BO->RHS); |
| 4791 | else if (BO->RHS == PN && L->isLoopInvariant(BO->LHS)) |
| 4792 | Accum = getSCEV(BO->LHS); |
| 4793 | |
| 4794 | if (!Accum) |
| 4795 | return nullptr; |
| 4796 | |
| 4797 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
| 4798 | if (BO->IsNUW) |
| 4799 | Flags = setFlags(Flags, SCEV::FlagNUW); |
| 4800 | if (BO->IsNSW) |
| 4801 | Flags = setFlags(Flags, SCEV::FlagNSW); |
| 4802 | |
| 4803 | const SCEV *StartVal = getSCEV(StartValueV); |
| 4804 | const SCEV *PHISCEV = getAddRecExpr(StartVal, Accum, L, Flags); |
| 4805 | |
| 4806 | ValueExprMap[SCEVCallbackVH(PN, this)] = PHISCEV; |
| 4807 | |
| 4808 | // We can add Flags to the post-inc expression only if we |
Michael Zolotukhin | 3207d30 | 2017-05-04 17:42:34 +0000 | [diff] [blame] | 4809 | // know that it is *undefined behavior* for BEValueV to |
Michael Zolotukhin | 37162ad | 2017-05-03 23:53:38 +0000 | [diff] [blame] | 4810 | // overflow. |
| 4811 | if (auto *BEInst = dyn_cast<Instruction>(BEValueV)) |
| 4812 | if (isLoopInvariant(Accum, L) && isAddRecNeverPoison(BEInst, L)) |
| 4813 | (void)getAddRecExpr(getAddExpr(StartVal, Accum), Accum, L, Flags); |
| 4814 | |
| 4815 | return PHISCEV; |
| 4816 | } |
| 4817 | |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4818 | const SCEV *ScalarEvolution::createAddRecFromPHI(PHINode *PN) { |
| 4819 | const Loop *L = LI.getLoopFor(PN->getParent()); |
| 4820 | if (!L || L->getHeader() != PN->getParent()) |
| 4821 | return nullptr; |
| 4822 | |
| 4823 | // The loop may have multiple entrances or multiple exits; we can analyze |
| 4824 | // this phi as an addrec if it has a unique entry value and a unique |
| 4825 | // backedge value. |
| 4826 | Value *BEValueV = nullptr, *StartValueV = nullptr; |
| 4827 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { |
| 4828 | Value *V = PN->getIncomingValue(i); |
| 4829 | if (L->contains(PN->getIncomingBlock(i))) { |
| 4830 | if (!BEValueV) { |
| 4831 | BEValueV = V; |
| 4832 | } else if (BEValueV != V) { |
| 4833 | BEValueV = nullptr; |
| 4834 | break; |
| 4835 | } |
| 4836 | } else if (!StartValueV) { |
| 4837 | StartValueV = V; |
| 4838 | } else if (StartValueV != V) { |
| 4839 | StartValueV = nullptr; |
| 4840 | break; |
| 4841 | } |
| 4842 | } |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4843 | if (!BEValueV || !StartValueV) |
| 4844 | return nullptr; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4845 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4846 | assert(ValueExprMap.find_as(PN) == ValueExprMap.end() && |
| 4847 | "PHI node already processed?"); |
Michael Zolotukhin | 37162ad | 2017-05-03 23:53:38 +0000 | [diff] [blame] | 4848 | |
| 4849 | // First, try to find an AddRec expression without creating a fictitious symbolic |
| 4850 | // value for PN. |
| 4851 | if (auto *S = createSimpleAffineAddRec(PN, BEValueV, StartValueV)) |
| 4852 | return S; |
| 4853 | |
| 4854 | // Handle PHI node value symbolically. |
| 4855 | const SCEV *SymbolicName = getUnknown(PN); |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4856 | ValueExprMap.insert({SCEVCallbackVH(PN, this), SymbolicName}); |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4857 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4858 | // Using this symbolic name for the PHI, analyze the value coming around |
| 4859 | // the back-edge. |
| 4860 | const SCEV *BEValue = getSCEV(BEValueV); |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4861 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4862 | // NOTE: If BEValue is loop invariant, we know that the PHI node just |
| 4863 | // has a special value for the first iteration of the loop. |
| 4864 | |
| 4865 | // If the value coming around the backedge is an add with the symbolic |
| 4866 | // value we just inserted, then we found a simple induction variable! |
| 4867 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(BEValue)) { |
| 4868 | // If there is a single occurrence of the symbolic value, replace it |
| 4869 | // with a recurrence. |
| 4870 | unsigned FoundIndex = Add->getNumOperands(); |
| 4871 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
| 4872 | if (Add->getOperand(i) == SymbolicName) |
| 4873 | if (FoundIndex == e) { |
| 4874 | FoundIndex = i; |
| 4875 | break; |
| 4876 | } |
| 4877 | |
| 4878 | if (FoundIndex != Add->getNumOperands()) { |
| 4879 | // Create an add with everything but the specified operand. |
| 4880 | SmallVector<const SCEV *, 8> Ops; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4881 | for (unsigned i = 0, e = Add->getNumOperands(); i != e; ++i) |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4882 | if (i != FoundIndex) |
Jatin Bhateja | c61ade1 | 2017-11-13 16:43:24 +0000 | [diff] [blame] | 4883 | Ops.push_back(SCEVBackedgeConditionFolder::rewrite(Add->getOperand(i), |
| 4884 | L, *this)); |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4885 | const SCEV *Accum = getAddExpr(Ops); |
| 4886 | |
| 4887 | // This is not a valid addrec if the step amount is varying each |
| 4888 | // loop iteration, but is not itself an addrec in this loop. |
Sanjoy Das | 2f27456 | 2017-10-18 22:00:57 +0000 | [diff] [blame] | 4889 | if (isLoopInvariant(Accum, L) || |
| 4890 | (isa<SCEVAddRecExpr>(Accum) && |
| 4891 | cast<SCEVAddRecExpr>(Accum)->getLoop() == L)) { |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4892 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
| 4893 | |
| 4894 | if (auto BO = MatchBinaryOp(BEValueV, DT)) { |
| 4895 | if (BO->Opcode == Instruction::Add && BO->LHS == PN) { |
| 4896 | if (BO->IsNUW) |
| 4897 | Flags = setFlags(Flags, SCEV::FlagNUW); |
| 4898 | if (BO->IsNSW) |
| 4899 | Flags = setFlags(Flags, SCEV::FlagNSW); |
| 4900 | } |
| 4901 | } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(BEValueV)) { |
| 4902 | // If the increment is an inbounds GEP, then we know the address |
| 4903 | // space cannot be wrapped around. We cannot make any guarantee |
| 4904 | // about signed or unsigned overflow because pointers are |
| 4905 | // unsigned but we may have a negative index from the base |
| 4906 | // pointer. We can guarantee that no unsigned wrap occurs if the |
| 4907 | // indices form a positive value. |
| 4908 | if (GEP->isInBounds() && GEP->getOperand(0) == PN) { |
| 4909 | Flags = setFlags(Flags, SCEV::FlagNW); |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4910 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4911 | const SCEV *Ptr = getSCEV(GEP->getPointerOperand()); |
| 4912 | if (isKnownPositive(getMinusSCEV(getSCEV(GEP), Ptr))) |
| 4913 | Flags = setFlags(Flags, SCEV::FlagNUW); |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 4914 | } |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4915 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4916 | // We cannot transfer nuw and nsw flags from subtraction |
| 4917 | // operations -- sub nuw X, Y is not the same as add nuw X, -Y |
| 4918 | // for instance. |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 4919 | } |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4920 | |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4921 | const SCEV *StartVal = getSCEV(StartValueV); |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4922 | const SCEV *PHISCEV = getAddRecExpr(StartVal, Accum, L, Flags); |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4923 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4924 | // Okay, for the entire analysis of this edge we assumed the PHI |
| 4925 | // to be symbolic. We now need to go back and purge all of the |
| 4926 | // entries for the scalars that use the symbolic expression. |
| 4927 | forgetSymbolicName(PN, SymbolicName); |
| 4928 | ValueExprMap[SCEVCallbackVH(PN, this)] = PHISCEV; |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4929 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4930 | // We can add Flags to the post-inc expression only if we |
Michael Zolotukhin | 3207d30 | 2017-05-04 17:42:34 +0000 | [diff] [blame] | 4931 | // know that it is *undefined behavior* for BEValueV to |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4932 | // overflow. |
| 4933 | if (auto *BEInst = dyn_cast<Instruction>(BEValueV)) |
| 4934 | if (isLoopInvariant(Accum, L) && isAddRecNeverPoison(BEInst, L)) |
| 4935 | (void)getAddRecExpr(getAddExpr(StartVal, Accum), Accum, L, Flags); |
Dorit Nuzman | 4750c78 | 2017-12-14 07:56:31 +0000 | [diff] [blame] | 4936 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4937 | return PHISCEV; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 4938 | } |
Dan Gohman | 6635bb2 | 2010-04-12 07:49:36 +0000 | [diff] [blame] | 4939 | } |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4940 | } else { |
| 4941 | // Otherwise, this could be a loop like this: |
| 4942 | // i = 0; for (j = 1; ..; ++j) { .... i = j; } |
| 4943 | // In this case, j = {1,+,1} and BEValue is j. |
| 4944 | // Because the other in-value of i (0) fits the evolution of BEValue, |
| 4945 | // i really is an addrec evolution. |
| 4946 | // |
| 4947 | // We can generalize this saying that i is the shifted value of BEValue |
| 4948 | // by one iteration: |
| 4949 | // PHI(f(0), f({1,+,1})) --> f({0,+,1}) |
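| | // In the example above, Shifted is {0,+,1} (the value of j one iteration |
| | // earlier) and Start is 0 (Shifted evaluated on loop entry); since Start |
| | // matches i's other incoming value (0), i maps to Shifted. |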
| 4950 | const SCEV *Shifted = SCEVShiftRewriter::rewrite(BEValue, L, *this); |
| 4951 | const SCEV *Start = SCEVInitRewriter::rewrite(Shifted, L, *this); |
| 4952 | if (Shifted != getCouldNotCompute() && |
| 4953 | Start != getCouldNotCompute()) { |
| 4954 | const SCEV *StartVal = getSCEV(StartValueV); |
| 4955 | if (Start == StartVal) { |
| 4956 | // Okay, for the entire analysis of this edge we assumed the PHI |
| 4957 | // to be symbolic. We now need to go back and purge all of the |
| 4958 | // entries for the scalars that use the symbolic expression. |
| 4959 | forgetSymbolicName(PN, SymbolicName); |
| 4960 | ValueExprMap[SCEVCallbackVH(PN, this)] = Shifted; |
| 4961 | return Shifted; |
| 4962 | } |
| 4963 | } |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4964 | } |
| 4965 | |
Michael Zolotukhin | 146a221 | 2017-04-28 22:14:27 +0000 | [diff] [blame] | 4966 | // Remove the temporary PHI node SCEV that has been inserted while intending |
| 4967 | // to create an AddRecExpr for this PHI node. We cannot keep this temporary |
| 4968 | // entry, as it would prevent later (possibly simpler) SCEV expressions from |
| 4969 | // being added to the ValueExprMap. |
| 4970 | eraseValueFromMap(PN); |
| 4971 | |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4972 | return nullptr; |
| 4973 | } |
| 4974 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 4975 | // Checks if the SCEV S is available at BB. S is considered available at BB |
| 4976 | // if S can be materialized at BB without introducing a fault. |
| 4977 | static bool IsAvailableOnEntry(const Loop *L, DominatorTree &DT, const SCEV *S, |
| 4978 | BasicBlock *BB) { |
| 4979 | struct CheckAvailable { |
| 4980 | bool TraversalDone = false; |
| 4981 | bool Available = true; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4982 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 4983 | const Loop *L = nullptr; // The loop BB is in (can be nullptr) |
| 4984 | BasicBlock *BB = nullptr; |
| 4985 | DominatorTree &DT; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4986 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 4987 | CheckAvailable(const Loop *L, BasicBlock *BB, DominatorTree &DT) |
| 4988 | : L(L), BB(BB), DT(DT) {} |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4989 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 4990 | bool setUnavailable() { |
| 4991 | TraversalDone = true; |
| 4992 | Available = false; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4993 | return false; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 4994 | } |
| 4995 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 4996 | bool follow(const SCEV *S) { |
| 4997 | switch (S->getSCEVType()) { |
| 4998 | case scConstant: case scTruncate: case scZeroExtend: case scSignExtend: |
| 4999 | case scAddExpr: case scMulExpr: case scUMaxExpr: case scSMaxExpr: |
Sanjoy Das | bb5ffc5 | 2015-10-24 05:37:28 +0000 | [diff] [blame] | 5000 | // These expressions are available if all of their operands are. |
| 5001 | return true; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5002 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5003 | case scAddRecExpr: { |
| 5004 | // We allow add recurrences that are on the loop BB is in, or some |
| 5005 | // outer loop. This guarantees availability because the value of the |
| 5006 | // add recurrence at BB is simply the "current" value of the induction |
| 5007 | // variable. We can relax this in the future; for instance an add |
| 5008 | // recurrence on a sibling dominating loop is also available at BB. |
| 5009 | const auto *ARLoop = cast<SCEVAddRecExpr>(S)->getLoop(); |
| 5010 | if (L && (ARLoop == L || ARLoop->contains(L))) |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5011 | return true; |
| 5012 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5013 | return setUnavailable(); |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5014 | } |
| 5015 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5016 | case scUnknown: { |
| 5017 | // For SCEVUnknown, we check for simple dominance. |
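| | // Arguments are trivially available; an Instruction is available if it |
| | // dominates BB. |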
| 5018 | const auto *SU = cast<SCEVUnknown>(S); |
| 5019 | Value *V = SU->getValue(); |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5020 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5021 | if (isa<Argument>(V)) |
| 5022 | return false; |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5023 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5024 | if (isa<Instruction>(V) && DT.dominates(cast<Instruction>(V), BB)) |
| 5025 | return false; |
| 5026 | |
| 5027 | return setUnavailable(); |
| 5028 | } |
| 5029 | |
| 5030 | case scUDivExpr: |
| 5031 | case scCouldNotCompute: |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 5032 | // We do not try to be smart about these at all. |
| 5033 | return setUnavailable(); |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5034 | } |
| 5035 | llvm_unreachable("switch should be fully covered!"); |
| 5036 | } |
| 5037 | |
| 5038 | bool isDone() { return TraversalDone; } |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5039 | }; |
| 5040 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5041 | CheckAvailable CA(L, BB, DT); |
| 5042 | SCEVTraversal<CheckAvailable> ST(CA); |
| 5043 | |
| 5044 | ST.visitAll(S); |
| 5045 | return CA.Available; |
| 5046 | } |
| 5047 | |
| 5048 | // Try to match a control flow sequence that branches out at BI and merges back |
| 5049 | // at Merge into a "C ? LHS : RHS" select pattern. Return true on a successful |
| 5050 | // match. |
| 5051 | static bool BrPHIToSelect(DominatorTree &DT, BranchInst *BI, PHINode *Merge, |
| 5052 | Value *&C, Value *&LHS, Value *&RHS) { |
| 5053 | C = BI->getCondition(); |
| 5054 | |
| 5055 | BasicBlockEdge LeftEdge(BI->getParent(), BI->getSuccessor(0)); |
| 5056 | BasicBlockEdge RightEdge(BI->getParent(), BI->getSuccessor(1)); |
| 5057 | |
| 5058 | if (!LeftEdge.isSingleEdge()) |
| 5059 | return false; |
| 5060 | |
| 5061 | assert(RightEdge.isSingleEdge() && "Follows from LeftEdge.isSingleEdge()"); |
| 5062 | |
| 5063 | Use &LeftUse = Merge->getOperandUse(0); |
| 5064 | Use &RightUse = Merge->getOperandUse(1); |
| 5065 | |
| 5066 | if (DT.dominates(LeftEdge, LeftUse) && DT.dominates(RightEdge, RightUse)) { |
| 5067 | LHS = LeftUse; |
| 5068 | RHS = RightUse; |
| 5069 | return true; |
| 5070 | } |
| 5071 | |
| 5072 | if (DT.dominates(LeftEdge, RightUse) && DT.dominates(RightEdge, LeftUse)) { |
| 5073 | LHS = RightUse; |
| 5074 | RHS = LeftUse; |
| 5075 | return true; |
| 5076 | } |
| 5077 | |
| 5078 | return false; |
| 5079 | } |
| 5080 | |
| 5081 | const SCEV *ScalarEvolution::createNodeFromSelectLikePHI(PHINode *PN) { |
Sanjoy Das | b0b4e86 | 2016-08-05 18:34:14 +0000 | [diff] [blame] | 5082 | auto IsReachable = |
| 5083 | [&](BasicBlock *BB) { return DT.isReachableFromEntry(BB); }; |
| 5084 | if (PN->getNumIncomingValues() == 2 && all_of(PN->blocks(), IsReachable)) { |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5085 | const Loop *L = LI.getLoopFor(PN->getParent()); |
| 5086 | |
Sanjoy Das | 337d478 | 2015-10-31 23:21:40 +0000 | [diff] [blame] | 5087 | // We don't want to break LCSSA, even in a SCEV expression tree. |
| 5088 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) |
| 5089 | if (LI.getLoopFor(PN->getIncomingBlock(i)) != L) |
| 5090 | return nullptr; |
| 5091 | |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5092 | // Try to match |
| 5093 | // |
| 5094 | // br %cond, label %left, label %right |
| 5095 | // left: |
| 5096 | // br label %merge |
| 5097 | // right: |
| 5098 | // br label %merge |
| 5099 | // merge: |
| 5100 | // V = phi [ %x, %left ], [ %y, %right ] |
| 5101 | // |
| 5102 | // as "select %cond, %x, %y" |
| 5103 | |
| 5104 | BasicBlock *IDom = DT[PN->getParent()]->getIDom()->getBlock(); |
| 5105 | assert(IDom && "At least the entry block should dominate PN"); |
| 5106 | |
| 5107 | auto *BI = dyn_cast<BranchInst>(IDom->getTerminator()); |
| 5108 | Value *Cond = nullptr, *LHS = nullptr, *RHS = nullptr; |
| 5109 | |
Sanjoy Das | 1cd930b | 2015-10-03 00:34:19 +0000 | [diff] [blame] | 5110 | if (BI && BI->isConditional() && |
| 5111 | BrPHIToSelect(DT, BI, PN, Cond, LHS, RHS) && |
| 5112 | IsAvailableOnEntry(L, DT, getSCEV(LHS), PN->getParent()) && |
| 5113 | IsAvailableOnEntry(L, DT, getSCEV(RHS), PN->getParent())) |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5114 | return createNodeForSelectOrPHI(PN, Cond, LHS, RHS); |
| 5115 | } |
| 5116 | |
| 5117 | return nullptr; |
| 5118 | } |
| 5119 | |
| 5120 | const SCEV *ScalarEvolution::createNodeForPHI(PHINode *PN) { |
| 5121 | if (const SCEV *S = createAddRecFromPHI(PN)) |
| 5122 | return S; |
| 5123 | |
| 5124 | if (const SCEV *S = createNodeFromSelectLikePHI(PN)) |
| 5125 | return S; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 5126 | |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 5127 | // If the PHI has a single incoming value, follow that value, unless the |
| 5128 | // PHI's incoming blocks are in a different loop, in which case doing so |
| 5129 | // risks breaking LCSSA form. Instcombine would normally zap these, but |
| 5130 | // it doesn't have DominatorTree information, so it may miss cases. |
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5131 | if (Value *V = SimplifyInstruction(PN, {getDataLayout(), &TLI, &DT, &AC})) |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5132 | if (LI.replacementPreservesLCSSAForm(PN, V)) |
Dan Gohman | a9c205c | 2010-02-25 06:57:05 +0000 | [diff] [blame] | 5133 | return getSCEV(V); |
Duncan Sands | 39d77131 | 2010-11-17 20:49:12 +0000 | [diff] [blame] | 5134 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5135 | // If it's not a loop phi, we can't handle it yet. |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5136 | return getUnknown(PN); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5137 | } |
| 5138 | |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 5139 | const SCEV *ScalarEvolution::createNodeForSelectOrPHI(Instruction *I, |
| 5140 | Value *Cond, |
| 5141 | Value *TrueVal, |
| 5142 | Value *FalseVal) { |
Mehdi Amini | 044cb34 | 2015-10-07 18:14:25 +0000 | [diff] [blame] | 5143 | // Handle "constant" branch or select. This can occur for instance when a |
| 5144 | // loop pass transforms an inner loop and moves on to process the outer loop. |
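| | // For example, "select i1 true, i32 %x, i32 %y" simply maps to getSCEV(%x). |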
| 5145 | if (auto *CI = dyn_cast<ConstantInt>(Cond)) |
| 5146 | return getSCEV(CI->isOne() ? TrueVal : FalseVal); |
| 5147 | |
Sanjoy Das | d067134 | 2015-10-02 19:39:59 +0000 | [diff] [blame] | 5148 | // Try to match some simple smax or umax patterns. |
| 5149 | auto *ICI = dyn_cast<ICmpInst>(Cond); |
| 5150 | if (!ICI) |
| 5151 | return getUnknown(I); |
| 5152 | |
| 5153 | Value *LHS = ICI->getOperand(0); |
| 5154 | Value *RHS = ICI->getOperand(1); |
| 5155 | |
| 5156 | switch (ICI->getPredicate()) { |
| 5157 | case ICmpInst::ICMP_SLT: |
| 5158 | case ICmpInst::ICMP_SLE: |
| 5159 | std::swap(LHS, RHS); |
Justin Bogner | cd1d5aa | 2016-08-17 20:30:52 +0000 | [diff] [blame] | 5160 | LLVM_FALLTHROUGH; |
Sanjoy Das | d067134 | 2015-10-02 19:39:59 +0000 | [diff] [blame] | 5161 | case ICmpInst::ICMP_SGT: |
| 5162 | case ICmpInst::ICMP_SGE: |
| 5163 | // a >s b ? a+x : b+x -> smax(a, b)+x |
| 5164 | // a >s b ? b+x : a+x -> smin(a, b)+x |
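| | // (The plain "a >s b ? a : b" form, i.e. x == 0, matches below with |
| | // LDiff == RDiff == 0.) |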
| 5165 | if (getTypeSizeInBits(LHS->getType()) <= getTypeSizeInBits(I->getType())) { |
| 5166 | const SCEV *LS = getNoopOrSignExtend(getSCEV(LHS), I->getType()); |
| 5167 | const SCEV *RS = getNoopOrSignExtend(getSCEV(RHS), I->getType()); |
| 5168 | const SCEV *LA = getSCEV(TrueVal); |
| 5169 | const SCEV *RA = getSCEV(FalseVal); |
| 5170 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 5171 | const SCEV *RDiff = getMinusSCEV(RA, RS); |
| 5172 | if (LDiff == RDiff) |
| 5173 | return getAddExpr(getSMaxExpr(LS, RS), LDiff); |
| 5174 | LDiff = getMinusSCEV(LA, RS); |
| 5175 | RDiff = getMinusSCEV(RA, LS); |
| 5176 | if (LDiff == RDiff) |
| 5177 | return getAddExpr(getSMinExpr(LS, RS), LDiff); |
| 5178 | } |
| 5179 | break; |
| 5180 | case ICmpInst::ICMP_ULT: |
| 5181 | case ICmpInst::ICMP_ULE: |
| 5182 | std::swap(LHS, RHS); |
Justin Bogner | cd1d5aa | 2016-08-17 20:30:52 +0000 | [diff] [blame] | 5183 | LLVM_FALLTHROUGH; |
Sanjoy Das | d067134 | 2015-10-02 19:39:59 +0000 | [diff] [blame] | 5184 | case ICmpInst::ICMP_UGT: |
| 5185 | case ICmpInst::ICMP_UGE: |
| 5186 | // a >u b ? a+x : b+x -> umax(a, b)+x |
| 5187 | // a >u b ? b+x : a+x -> umin(a, b)+x |
| 5188 | if (getTypeSizeInBits(LHS->getType()) <= getTypeSizeInBits(I->getType())) { |
| 5189 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), I->getType()); |
| 5190 | const SCEV *RS = getNoopOrZeroExtend(getSCEV(RHS), I->getType()); |
| 5191 | const SCEV *LA = getSCEV(TrueVal); |
| 5192 | const SCEV *RA = getSCEV(FalseVal); |
| 5193 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 5194 | const SCEV *RDiff = getMinusSCEV(RA, RS); |
| 5195 | if (LDiff == RDiff) |
| 5196 | return getAddExpr(getUMaxExpr(LS, RS), LDiff); |
| 5197 | LDiff = getMinusSCEV(LA, RS); |
| 5198 | RDiff = getMinusSCEV(RA, LS); |
| 5199 | if (LDiff == RDiff) |
| 5200 | return getAddExpr(getUMinExpr(LS, RS), LDiff); |
| 5201 | } |
| 5202 | break; |
| 5203 | case ICmpInst::ICMP_NE: |
| 5204 | // n != 0 ? n+x : 1+x -> umax(n, 1)+x |
| 5205 | if (getTypeSizeInBits(LHS->getType()) <= getTypeSizeInBits(I->getType()) && |
| 5206 | isa<ConstantInt>(RHS) && cast<ConstantInt>(RHS)->isZero()) { |
| 5207 | const SCEV *One = getOne(I->getType()); |
| 5208 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), I->getType()); |
| 5209 | const SCEV *LA = getSCEV(TrueVal); |
| 5210 | const SCEV *RA = getSCEV(FalseVal); |
| 5211 | const SCEV *LDiff = getMinusSCEV(LA, LS); |
| 5212 | const SCEV *RDiff = getMinusSCEV(RA, One); |
| 5213 | if (LDiff == RDiff) |
| 5214 | return getAddExpr(getUMaxExpr(One, LS), LDiff); |
| 5215 | } |
| 5216 | break; |
| 5217 | case ICmpInst::ICMP_EQ: |
| 5218 | // n == 0 ? 1+x : n+x -> umax(n, 1)+x |
| 5219 | if (getTypeSizeInBits(LHS->getType()) <= getTypeSizeInBits(I->getType()) && |
| 5220 | isa<ConstantInt>(RHS) && cast<ConstantInt>(RHS)->isZero()) { |
| 5221 | const SCEV *One = getOne(I->getType()); |
| 5222 | const SCEV *LS = getNoopOrZeroExtend(getSCEV(LHS), I->getType()); |
| 5223 | const SCEV *LA = getSCEV(TrueVal); |
| 5224 | const SCEV *RA = getSCEV(FalseVal); |
| 5225 | const SCEV *LDiff = getMinusSCEV(LA, One); |
| 5226 | const SCEV *RDiff = getMinusSCEV(RA, LS); |
| 5227 | if (LDiff == RDiff) |
| 5228 | return getAddExpr(getUMaxExpr(One, LS), LDiff); |
| 5229 | } |
| 5230 | break; |
| 5231 | default: |
| 5232 | break; |
| 5233 | } |
| 5234 | |
| 5235 | return getUnknown(I); |
| 5236 | } |
| 5237 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 5238 | /// Expand GEP instructions into add and multiply operations. This allows them |
| 5239 | /// to be analyzed by regular SCEV code. |
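| | /// For example, "getelementptr i32, i32* %p, i64 %i" roughly becomes |
| | /// (%p + 4 * %i) in SCEV terms for a 4-byte i32; the exact expression |
| | /// depends on the DataLayout. |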
Dan Gohman | b256ccf | 2009-12-18 02:09:29 +0000 | [diff] [blame] | 5240 | const SCEV *ScalarEvolution::createNodeForGEP(GEPOperator *GEP) { |
Dan Gohman | 30f24fe | 2009-05-09 00:14:52 +0000 | [diff] [blame] | 5241 | // Don't attempt to analyze GEPs over unsized objects. |
Eduard Burtescu | 19eb031 | 2016-01-19 17:28:00 +0000 | [diff] [blame] | 5242 | if (!GEP->getSourceElementType()->isSized()) |
Dan Gohman | 30f24fe | 2009-05-09 00:14:52 +0000 | [diff] [blame] | 5243 | return getUnknown(GEP); |
Matt Arsenault | 4c26590 | 2013-09-27 22:38:23 +0000 | [diff] [blame] | 5244 | |
Jingyue Wu | 2982d4d | 2015-05-18 17:03:25 +0000 | [diff] [blame] | 5245 | SmallVector<const SCEV *, 4> IndexExprs; |
| 5246 | for (auto Index = GEP->idx_begin(); Index != GEP->idx_end(); ++Index) |
| 5247 | IndexExprs.push_back(getSCEV(*Index)); |
Peter Collingbourne | 8dff039 | 2016-11-13 06:59:50 +0000 | [diff] [blame] | 5248 | return getGEPExpr(GEP, IndexExprs); |
Dan Gohman | ee750d1 | 2009-05-08 20:26:55 +0000 | [diff] [blame] | 5249 | } |
| 5250 | |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 5251 | uint32_t ScalarEvolution::GetMinTrailingZerosImpl(const SCEV *S) { |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5252 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 5253 | return C->getAPInt().countTrailingZeros(); |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 5254 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5255 | if (const SCEVTruncateExpr *T = dyn_cast<SCEVTruncateExpr>(S)) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5256 | return std::min(GetMinTrailingZeros(T->getOperand()), |
| 5257 | (uint32_t)getTypeSizeInBits(T->getType())); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5258 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5259 | if (const SCEVZeroExtendExpr *E = dyn_cast<SCEVZeroExtendExpr>(S)) { |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5260 | uint32_t OpRes = GetMinTrailingZeros(E->getOperand()); |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 5261 | return OpRes == getTypeSizeInBits(E->getOperand()->getType()) |
| 5262 | ? getTypeSizeInBits(E->getType()) |
| 5263 | : OpRes; |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5264 | } |
| 5265 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5266 | if (const SCEVSignExtendExpr *E = dyn_cast<SCEVSignExtendExpr>(S)) { |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5267 | uint32_t OpRes = GetMinTrailingZeros(E->getOperand()); |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 5268 | return OpRes == getTypeSizeInBits(E->getOperand()->getType()) |
| 5269 | ? getTypeSizeInBits(E->getType()) |
| 5270 | : OpRes; |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5271 | } |
| 5272 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5273 | if (const SCEVAddExpr *A = dyn_cast<SCEVAddExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5274 | // The result is the min of all operands' results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5275 | uint32_t MinOpRes = GetMinTrailingZeros(A->getOperand(0)); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5276 | for (unsigned i = 1, e = A->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5277 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(A->getOperand(i))); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5278 | return MinOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 5279 | } |
| 5280 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5281 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5282 | // The result is the sum of all operands' results. |
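| | // (E.g. 8 * 12 = 96 has 3 + 2 = 5 trailing zero bits.) |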
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5283 | uint32_t SumOpRes = GetMinTrailingZeros(M->getOperand(0)); |
| 5284 | uint32_t BitWidth = getTypeSizeInBits(M->getType()); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5285 | for (unsigned i = 1, e = M->getNumOperands(); |
| 5286 | SumOpRes != BitWidth && i != e; ++i) |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 5287 | SumOpRes = |
| 5288 | std::min(SumOpRes + GetMinTrailingZeros(M->getOperand(i)), BitWidth); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5289 | return SumOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 5290 | } |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5291 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5292 | if (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(S)) { |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5293 | // The result is the min of all operands' results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5294 | uint32_t MinOpRes = GetMinTrailingZeros(A->getOperand(0)); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5295 | for (unsigned i = 1, e = A->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5296 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(A->getOperand(i))); |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5297 | return MinOpRes; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 5298 | } |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5299 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5300 | if (const SCEVSMaxExpr *M = dyn_cast<SCEVSMaxExpr>(S)) { |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 5301 | // The result is the min of all operands' results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5302 | uint32_t MinOpRes = GetMinTrailingZeros(M->getOperand(0)); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 5303 | for (unsigned i = 1, e = M->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5304 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(M->getOperand(i))); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 5305 | return MinOpRes; |
| 5306 | } |
| 5307 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 5308 | if (const SCEVUMaxExpr *M = dyn_cast<SCEVUMaxExpr>(S)) { |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 5309 | // The result is the min of all operands' results. |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5310 | uint32_t MinOpRes = GetMinTrailingZeros(M->getOperand(0)); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 5311 | for (unsigned i = 1, e = M->getNumOperands(); MinOpRes && i != e; ++i) |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5312 | MinOpRes = std::min(MinOpRes, GetMinTrailingZeros(M->getOperand(i))); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 5313 | return MinOpRes; |
| 5314 | } |
| 5315 | |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5316 | if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) { |
| 5317 | // For a SCEVUnknown, ask ValueTracking. |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 5318 | KnownBits Known = |
| | computeKnownBits(U->getValue(), getDataLayout(), 0, &AC, nullptr, &DT); |
Craig Topper | 8df66c6 | 2017-05-12 17:20:30 +0000 | [diff] [blame] | 5319 | return Known.countMinTrailingZeros(); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5320 | } |
| 5321 | |
| 5322 | // SCEVUDivExpr |
Nick Lewycky | 3783b46 | 2007-11-22 07:59:40 +0000 | [diff] [blame] | 5323 | return 0; |
Chris Lattner | 49b090e | 2006-12-12 02:26:09 +0000 | [diff] [blame] | 5324 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 5325 | |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 5326 | uint32_t ScalarEvolution::GetMinTrailingZeros(const SCEV *S) { |
| 5327 | auto I = MinTrailingZerosCache.find(S); |
| 5328 | if (I != MinTrailingZerosCache.end()) |
| 5329 | return I->second; |
| 5330 | |
| 5331 | uint32_t Result = GetMinTrailingZerosImpl(S); |
| 5332 | auto InsertPair = MinTrailingZerosCache.insert({S, Result}); |
| 5333 | assert(InsertPair.second && "Should insert a new key"); |
| 5334 | return InsertPair.first->second; |
| 5335 | } |
| 5336 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 5337 | /// Helper method to extract a range for V from !range metadata in the IR. |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 5338 | static Optional<ConstantRange> GetRangeFromMetadata(Value *V) { |
Sanjoy Das | a7e1378 | 2015-10-24 05:37:35 +0000 | [diff] [blame] | 5339 | if (Instruction *I = dyn_cast<Instruction>(V)) |
| 5340 | if (MDNode *MD = I->getMetadata(LLVMContext::MD_range)) |
| 5341 | return getConstantRangeFromMetadata(*MD); |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 5342 | |
| 5343 | return None; |
| 5344 | } |
| 5345 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 5346 | /// Determine the range for a particular SCEV. If SignHint is |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5347 | /// HINT_RANGE_UNSIGNED (resp. HINT_RANGE_SIGNED) then getRange prefers ranges |
| 5348 | /// with a "cleaner" unsigned (resp. signed) representation. |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5349 | const ConstantRange & |
| 5350 | ScalarEvolution::getRangeRef(const SCEV *S, |
| 5351 | ScalarEvolution::RangeSignHint SignHint) { |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5352 | DenseMap<const SCEV *, ConstantRange> &Cache = |
| 5353 | SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED ? UnsignedRanges |
| 5354 | : SignedRanges; |
| 5355 | |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 5356 | // See if we've computed this range already. |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5357 | DenseMap<const SCEV *, ConstantRange>::iterator I = Cache.find(S); |
| 5358 | if (I != Cache.end()) |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 5359 | return I->second; |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5360 | |
| 5361 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 5362 | return setRange(C, SignHint, ConstantRange(C->getAPInt())); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5363 | |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 5364 | unsigned BitWidth = getTypeSizeInBits(S->getType()); |
| 5365 | ConstantRange ConservativeResult(BitWidth, /*isFullSet=*/true); |
| 5366 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5367 | // If the value has known trailing zeros, the maximum value of the range will |
| 5368 | // have those trailing zeros as well. |
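| | // E.g. with TZ == 2 the unsigned range becomes [0, (largest multiple of 4) + 1). |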
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 5369 | uint32_t TZ = GetMinTrailingZeros(S); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5370 | if (TZ != 0) { |
| 5371 | if (SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED) |
| 5372 | ConservativeResult = |
| 5373 | ConstantRange(APInt::getMinValue(BitWidth), |
| 5374 | APInt::getMaxValue(BitWidth).lshr(TZ).shl(TZ) + 1); |
| 5375 | else |
| 5376 | ConservativeResult = ConstantRange( |
| 5377 | APInt::getSignedMinValue(BitWidth), |
| 5378 | APInt::getSignedMaxValue(BitWidth).ashr(TZ).shl(TZ) + 1); |
| 5379 | } |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 5380 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5381 | if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5382 | ConstantRange X = getRangeRef(Add->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5383 | for (unsigned i = 1, e = Add->getNumOperands(); i != e; ++i) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5384 | X = X.add(getRangeRef(Add->getOperand(i), SignHint)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5385 | return setRange(Add, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5386 | } |
| 5387 | |
| 5388 | if (const SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5389 | ConstantRange X = getRangeRef(Mul->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5390 | for (unsigned i = 1, e = Mul->getNumOperands(); i != e; ++i) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5391 | X = X.multiply(getRangeRef(Mul->getOperand(i), SignHint)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5392 | return setRange(Mul, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5393 | } |
| 5394 | |
| 5395 | if (const SCEVSMaxExpr *SMax = dyn_cast<SCEVSMaxExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5396 | ConstantRange X = getRangeRef(SMax->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5397 | for (unsigned i = 1, e = SMax->getNumOperands(); i != e; ++i) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5398 | X = X.smax(getRangeRef(SMax->getOperand(i), SignHint)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5399 | return setRange(SMax, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5400 | } |
| 5401 | |
| 5402 | if (const SCEVUMaxExpr *UMax = dyn_cast<SCEVUMaxExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5403 | ConstantRange X = getRangeRef(UMax->getOperand(0), SignHint); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5404 | for (unsigned i = 1, e = UMax->getNumOperands(); i != e; ++i) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5405 | X = X.umax(getRangeRef(UMax->getOperand(i), SignHint)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5406 | return setRange(UMax, SignHint, ConservativeResult.intersectWith(X)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5407 | } |
| 5408 | |
| 5409 | if (const SCEVUDivExpr *UDiv = dyn_cast<SCEVUDivExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5410 | ConstantRange X = getRangeRef(UDiv->getLHS(), SignHint); |
| 5411 | ConstantRange Y = getRangeRef(UDiv->getRHS(), SignHint); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5412 | return setRange(UDiv, SignHint, |
| 5413 | ConservativeResult.intersectWith(X.udiv(Y))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5414 | } |
| 5415 | |
| 5416 | if (const SCEVZeroExtendExpr *ZExt = dyn_cast<SCEVZeroExtendExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5417 | ConstantRange X = getRangeRef(ZExt->getOperand(), SignHint); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5418 | return setRange(ZExt, SignHint, |
| 5419 | ConservativeResult.intersectWith(X.zeroExtend(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5420 | } |
| 5421 | |
| 5422 | if (const SCEVSignExtendExpr *SExt = dyn_cast<SCEVSignExtendExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5423 | ConstantRange X = getRangeRef(SExt->getOperand(), SignHint); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5424 | return setRange(SExt, SignHint, |
| 5425 | ConservativeResult.intersectWith(X.signExtend(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5426 | } |
| 5427 | |
| 5428 | if (const SCEVTruncateExpr *Trunc = dyn_cast<SCEVTruncateExpr>(S)) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5429 | ConstantRange X = getRangeRef(Trunc->getOperand(), SignHint); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5430 | return setRange(Trunc, SignHint, |
| 5431 | ConservativeResult.intersectWith(X.truncate(BitWidth))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5432 | } |
| 5433 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5434 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(S)) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5435 | // If there's no unsigned wrap, the value will never be less than its |
| 5436 | // initial value. |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 5437 | if (AddRec->hasNoUnsignedWrap()) |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5438 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(AddRec->getStart())) |
Dan Gohman | ebbd05f | 2010-04-12 23:08:18 +0000 | [diff] [blame] | 5439 | if (!C->getValue()->isZero()) |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 5440 | ConservativeResult = ConservativeResult.intersectWith( |
| 5441 | ConstantRange(C->getAPInt(), APInt(BitWidth, 0))); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5442 | |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5443 | // If there's no signed wrap, and all the operands have the same sign or |
| 5444 | // zero, the value won't ever change sign. |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 5445 | if (AddRec->hasNoSignedWrap()) { |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5446 | bool AllNonNeg = true; |
| 5447 | bool AllNonPos = true; |
| 5448 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) { |
| 5449 | if (!isKnownNonNegative(AddRec->getOperand(i))) AllNonNeg = false; |
| 5450 | if (!isKnownNonPositive(AddRec->getOperand(i))) AllNonPos = false; |
| 5451 | } |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5452 | if (AllNonNeg) |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 5453 | ConservativeResult = ConservativeResult.intersectWith( |
| 5454 | ConstantRange(APInt(BitWidth, 0), |
| 5455 | APInt::getSignedMinValue(BitWidth))); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5456 | else if (AllNonPos) |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 5457 | ConservativeResult = ConservativeResult.intersectWith( |
| 5458 | ConstantRange(APInt::getSignedMinValue(BitWidth), |
| 5459 | APInt(BitWidth, 1))); |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5460 | } |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5461 | |
| 5462 | // TODO: non-affine addrec |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 5463 | if (AddRec->isAffine()) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 5464 | const SCEV *MaxBECount = getMaxBackedgeTakenCount(AddRec->getLoop()); |
Dan Gohman | 85be433 | 2010-01-26 19:19:05 +0000 | [diff] [blame] | 5465 | if (!isa<SCEVCouldNotCompute>(MaxBECount) && |
| 5466 | getTypeSizeInBits(MaxBECount->getType()) <= BitWidth) { |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5467 | auto RangeFromAffine = getRangeForAffineAR( |
| 5468 | AddRec->getStart(), AddRec->getStepRecurrence(*this), MaxBECount, |
| 5469 | BitWidth); |
| 5470 | if (!RangeFromAffine.isFullSet()) |
| 5471 | ConservativeResult = |
| 5472 | ConservativeResult.intersectWith(RangeFromAffine); |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5473 | |
| 5474 | auto RangeFromFactoring = getRangeViaFactoring( |
| 5475 | AddRec->getStart(), AddRec->getStepRecurrence(*this), MaxBECount, |
| 5476 | BitWidth); |
| 5477 | if (!RangeFromFactoring.isFullSet()) |
| 5478 | ConservativeResult = |
| 5479 | ConservativeResult.intersectWith(RangeFromFactoring); |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 5480 | } |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 5481 | } |
Dan Gohman | 51ad99d | 2010-01-21 02:09:26 +0000 | [diff] [blame] | 5482 | |
Craig Topper | 252682a | 2017-05-07 16:28:17 +0000 | [diff] [blame] | 5483 | return setRange(AddRec, SignHint, std::move(ConservativeResult)); |
Dan Gohman | d261d27 | 2009-06-24 01:05:09 +0000 | [diff] [blame] | 5484 | } |
| 5485 | |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5486 | if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) { |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 5487 | // Check if the IR explicitly contains !range metadata. |
| 5488 | Optional<ConstantRange> MDRange = GetRangeFromMetadata(U->getValue()); |
| 5489 | if (MDRange.hasValue()) |
| 5490 | ConservativeResult = ConservativeResult.intersectWith(MDRange.getValue()); |
| 5491 | |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5492 | // Split here to avoid paying the compile-time cost of calling both |
| 5493 | // computeKnownBits and ComputeNumSignBits. This restriction can be lifted |
| 5494 | // if needed. |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 5495 | const DataLayout &DL = getDataLayout(); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5496 | if (SignHint == ScalarEvolution::HINT_RANGE_UNSIGNED) { |
| 5497 | // For a SCEVUnknown, ask ValueTracking. |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 5498 | KnownBits Known = computeKnownBits(U->getValue(), DL, 0, &AC, nullptr, &DT); |
Craig Topper | b45eabc | 2017-04-26 16:39:58 +0000 | [diff] [blame] | 5499 | if (Known.One != ~Known.Zero + 1) |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5500 | ConservativeResult = |
Craig Topper | b45eabc | 2017-04-26 16:39:58 +0000 | [diff] [blame] | 5501 | ConservativeResult.intersectWith(ConstantRange(Known.One, |
| 5502 | ~Known.Zero + 1)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5503 | } else { |
| 5504 | assert(SignHint == ScalarEvolution::HINT_RANGE_SIGNED && |
| 5505 | "generalize as needed!"); |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 5506 | unsigned NS = ComputeNumSignBits(U->getValue(), DL, 0, &AC, nullptr, &DT); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5507 | if (NS > 1) |
| 5508 | ConservativeResult = ConservativeResult.intersectWith( |
| 5509 | ConstantRange(APInt::getSignedMinValue(BitWidth).ashr(NS - 1), |
| 5510 | APInt::getSignedMaxValue(BitWidth).ashr(NS - 1) + 1)); |
Sanjoy Das | 91b5477 | 2015-03-09 21:43:43 +0000 | [diff] [blame] | 5511 | } |
| 5512 | |
Craig Topper | 252682a | 2017-05-07 16:28:17 +0000 | [diff] [blame] | 5513 | return setRange(U, SignHint, std::move(ConservativeResult)); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5514 | } |
| 5515 | |
Craig Topper | 252682a | 2017-05-07 16:28:17 +0000 | [diff] [blame] | 5516 | return setRange(S, SignHint, std::move(ConservativeResult)); |
Dan Gohman | c702fc0 | 2009-06-19 23:29:04 +0000 | [diff] [blame] | 5517 | } |
| 5518 | |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5519 | // Given a StartRange, Step and MaxBECount for an expression, compute a range |
| 5520 | // of values that the expression can take. Initially, the expression has a |
| 5521 | // value from StartRange and is then changed by Step up to MaxBECount times. |
| 5522 | // The Signed argument defines whether we treat Step as signed or unsigned. |
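| | // For example (unsigned): StartRange = [0, 10), Step = 2 and MaxBECount = 3 |
| | // give Offset = 2 * 3 = 6 and a resulting range of [0, 16). |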
| 5523 | static ConstantRange getRangeForAffineARHelper(APInt Step, |
Craig Topper | d6f2639 | 2017-05-08 02:29:15 +0000 | [diff] [blame] | 5524 | const ConstantRange &StartRange, |
Craig Topper | 6c5e22a | 2017-05-06 06:03:07 +0000 | [diff] [blame] | 5525 | const APInt &MaxBECount, |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5526 | unsigned BitWidth, bool Signed) { |
| 5527 | // If either Step or MaxBECount is 0, then the expression won't change, and we |
| 5528 | // just need to return the initial range. |
| 5529 | if (Step == 0 || MaxBECount == 0) |
| 5530 | return StartRange; |
| 5531 | |
Simon Pilgrim | 6bdc755 | 2017-03-31 10:59:37 +0000 | [diff] [blame] | 5532 | // If we don't know anything about the initial value (i.e. StartRange is |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5533 | // FullRange), then we don't know anything about the final range either. |
| 5534 | // Return FullRange. |
| 5535 | if (StartRange.isFullSet()) |
| 5536 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5537 | |
| 5538 | // If Step is signed and negative, then we use its absolute value, but we also |
| 5539 | // note that we're moving in the opposite direction. |
| 5540 | bool Descending = Signed && Step.isNegative(); |
| 5541 | |
| 5542 | if (Signed) |
| 5543 | // This is correct even for INT_SMIN. Let's look at i8 to illustrate this: |
| 5544 | // abs(INT_SMIN) = abs(-128) = abs(0x80) = -0x80 = 0x80 = 128. |
| 5545 | // These equations hold true due to the well-defined wrap-around behavior of |
| 5546 | // APInt. |
| 5547 | Step = Step.abs(); |
| 5548 | |
| 5549 | // Check if Offset is more than the full span of BitWidth. If it is, the |
| 5550 | // expression is guaranteed to overflow. |
| 5551 | if (APInt::getMaxValue(StartRange.getBitWidth()).udiv(Step).ult(MaxBECount)) |
| 5552 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5553 | |
| 5554 | // Offset is by how much the expression can change. Checks above guarantee no |
| 5555 | // overflow here. |
| 5556 | APInt Offset = Step * MaxBECount; |
| 5557 | |
| 5558 | // Minimum value of the final range will match the minimal value of StartRange |
| 5559 | // if the expression is increasing and will be decreased by Offset otherwise. |
| 5560 | // Maximum value of the final range will match the maximal value of StartRange |
| 5561 | // if the expression is decreasing and will be increased by Offset otherwise. |
| 5562 | APInt StartLower = StartRange.getLower(); |
| 5563 | APInt StartUpper = StartRange.getUpper() - 1; |
Craig Topper | 6c5e22a | 2017-05-06 06:03:07 +0000 | [diff] [blame] | 5564 | APInt MovedBoundary = Descending ? (StartLower - std::move(Offset)) |
| 5565 | : (StartUpper + std::move(Offset)); |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5566 | |
| 5567 | // It's possible that the new minimum/maximum value will fall into the initial |
| 5568 | // range (due to wrap around). This means that the expression can take any |
| 5569 | // value in this bitwidth, and we have to return full range. |
| 5570 | if (StartRange.contains(MovedBoundary)) |
| 5571 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5572 | |
Craig Topper | 6c5e22a | 2017-05-06 06:03:07 +0000 | [diff] [blame] | 5573 | APInt NewLower = |
| 5574 | Descending ? std::move(MovedBoundary) : std::move(StartLower); |
| 5575 | APInt NewUpper = |
| 5576 | Descending ? std::move(StartUpper) : std::move(MovedBoundary); |
| 5577 | NewUpper += 1; |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5578 | |
| 5579 | // If we end up with full range, return a proper full range. |
Craig Topper | 6c5e22a | 2017-05-06 06:03:07 +0000 | [diff] [blame] | 5580 | if (NewLower == NewUpper) |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5581 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5582 | |
| 5583 | // No overflow detected, return the computed [NewLower, NewUpper) range. |
Craig Topper | 6c5e22a | 2017-05-06 06:03:07 +0000 | [diff] [blame] | 5584 | return ConstantRange(std::move(NewLower), std::move(NewUpper)); |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5585 | } |
| 5586 | |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5587 | ConstantRange ScalarEvolution::getRangeForAffineAR(const SCEV *Start, |
| 5588 | const SCEV *Step, |
| 5589 | const SCEV *MaxBECount, |
| 5590 | unsigned BitWidth) { |
| 5591 | assert(!isa<SCEVCouldNotCompute>(MaxBECount) && |
| 5592 | getTypeSizeInBits(MaxBECount->getType()) <= BitWidth && |
| 5593 | "Precondition!"); |
| 5594 | |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5595 | MaxBECount = getNoopOrZeroExtend(MaxBECount, Start->getType()); |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5596 | APInt MaxBECountValue = getUnsignedRangeMax(MaxBECount); |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5597 | |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5598 | // First, consider step signed. |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5599 | ConstantRange StartSRange = getSignedRange(Start); |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5600 | ConstantRange StepSRange = getSignedRange(Step); |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5601 | |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5602 | // If Step can be both positive and negative, we need to find ranges for the |
| 5603 | // maximum absolute step values in both directions and union them. |
| 5604 | ConstantRange SR = |
| 5605 | getRangeForAffineARHelper(StepSRange.getSignedMin(), StartSRange, |
| 5606 | MaxBECountValue, BitWidth, /* Signed = */ true); |
| 5607 | SR = SR.unionWith(getRangeForAffineARHelper(StepSRange.getSignedMax(), |
| 5608 | StartSRange, MaxBECountValue, |
| 5609 | BitWidth, /* Signed = */ true)); |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5610 | |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5611 | // Next, consider step unsigned. |
| 5612 | ConstantRange UR = getRangeForAffineARHelper( |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 5613 | getUnsignedRangeMax(Step), getUnsignedRange(Start), |
Michael Zolotukhin | 99de88d | 2017-03-16 21:07:38 +0000 | [diff] [blame] | 5614 | MaxBECountValue, BitWidth, /* Signed = */ false); |
| 5615 | |
| 5616 | // Finally, intersect signed and unsigned ranges. |
| 5617 | return SR.intersectWith(UR); |
Sanjoy Das | b765b63 | 2016-03-02 00:57:39 +0000 | [diff] [blame] | 5618 | } |
| 5619 | |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5620 | ConstantRange ScalarEvolution::getRangeViaFactoring(const SCEV *Start, |
| 5621 | const SCEV *Step, |
| 5622 | const SCEV *MaxBECount, |
| 5623 | unsigned BitWidth) { |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5624 | // RangeOf({C?A:B,+,C?P:Q}) == RangeOf(C?{A,+,P}:{B,+,Q}) |
| 5625 | // == RangeOf({A,+,P}) union RangeOf({B,+,Q}) |
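| | // For example, for Start = (%c ? 0 : 8) and Step = (%c ? 1 : 2), the result |
| | // is RangeOf({0,+,1}) unioned with RangeOf({8,+,2}). |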
| 5626 | |
| 5627 | struct SelectPattern { |
| 5628 | Value *Condition = nullptr; |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5629 | APInt TrueValue; |
| 5630 | APInt FalseValue; |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5631 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5632 | explicit SelectPattern(ScalarEvolution &SE, unsigned BitWidth, |
| 5633 | const SCEV *S) { |
| 5634 | Optional<unsigned> CastOp; |
Sanjoy Das | 97d19bd | 2016-03-09 01:51:02 +0000 | [diff] [blame] | 5635 | APInt Offset(BitWidth, 0); |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5636 | |
| 5637 | assert(SE.getTypeSizeInBits(S->getType()) == BitWidth && |
| 5638 | "Should be!"); |
| 5639 | |
Sanjoy Das | 97d19bd | 2016-03-09 01:51:02 +0000 | [diff] [blame] | 5640 | // Peel off a constant offset: |
| 5641 | if (auto *SA = dyn_cast<SCEVAddExpr>(S)) { |
| 5642 | // In the future we could consider being smarter here and handle |
| 5643 | // {Start+Step,+,Step} too. |
| 5644 | if (SA->getNumOperands() != 2 || !isa<SCEVConstant>(SA->getOperand(0))) |
| 5645 | return; |
| 5646 | |
| 5647 | Offset = cast<SCEVConstant>(SA->getOperand(0))->getAPInt(); |
| 5648 | S = SA->getOperand(1); |
| 5649 | } |
| 5650 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5651 | // Peel off a cast operation |
| 5652 | if (auto *SCast = dyn_cast<SCEVCastExpr>(S)) { |
| 5653 | CastOp = SCast->getSCEVType(); |
| 5654 | S = SCast->getOperand(); |
| 5655 | } |
| 5656 | |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5657 | using namespace llvm::PatternMatch; |
| 5658 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5659 | auto *SU = dyn_cast<SCEVUnknown>(S); |
| 5660 | const APInt *TrueVal, *FalseVal; |
| 5661 | if (!SU || |
| 5662 | !match(SU->getValue(), m_Select(m_Value(Condition), m_APInt(TrueVal), |
| 5663 | m_APInt(FalseVal)))) { |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5664 | Condition = nullptr; |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5665 | return; |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5666 | } |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5667 | |
| 5668 | TrueValue = *TrueVal; |
| 5669 | FalseValue = *FalseVal; |
| 5670 | |
| 5671 | // Re-apply the cast we peeled off earlier |
| 5672 | if (CastOp.hasValue()) |
| 5673 | switch (*CastOp) { |
| 5674 | default: |
| 5675 | llvm_unreachable("Unknown SCEV cast type!"); |
| 5676 | |
| 5677 | case scTruncate: |
| 5678 | TrueValue = TrueValue.trunc(BitWidth); |
| 5679 | FalseValue = FalseValue.trunc(BitWidth); |
| 5680 | break; |
| 5681 | case scZeroExtend: |
| 5682 | TrueValue = TrueValue.zext(BitWidth); |
| 5683 | FalseValue = FalseValue.zext(BitWidth); |
| 5684 | break; |
| 5685 | case scSignExtend: |
| 5686 | TrueValue = TrueValue.sext(BitWidth); |
| 5687 | FalseValue = FalseValue.sext(BitWidth); |
| 5688 | break; |
| 5689 | } |
Sanjoy Das | 97d19bd | 2016-03-09 01:51:02 +0000 | [diff] [blame] | 5690 | |
| 5691 | // Re-apply the constant offset we peeled off earlier |
| 5692 | TrueValue += Offset; |
| 5693 | FalseValue += Offset; |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5694 | } |
| 5695 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5696 | bool isRecognized() { return Condition != nullptr; } |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5697 | }; |
| 5698 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5699 | SelectPattern StartPattern(*this, BitWidth, Start); |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5700 | if (!StartPattern.isRecognized()) |
| 5701 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5702 | |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5703 | SelectPattern StepPattern(*this, BitWidth, Step); |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5704 | if (!StepPattern.isRecognized()) |
| 5705 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5706 | |
| 5707 | if (StartPattern.Condition != StepPattern.Condition) { |
| 5708 | // We don't handle this case today; but we could, by considering four |
| 5709 | // possibilities below instead of two. I'm not sure if there are cases where |
| 5710 | // that will help over what getRange already does, though. |
| 5711 | return ConstantRange(BitWidth, /* isFullSet = */ true); |
| 5712 | } |
| 5713 | |
| 5714 | // NB! Calling ScalarEvolution::getConstant is fine, but we should not try to |
| 5715 | // construct arbitrary general SCEV expressions here. This function is called |
| 5716 | // from deep in the call stack, and calling getSCEV (on a sext instruction, |
| 5717 | // say) can end up caching a suboptimal value. |
| 5718 | |
Sanjoy Das | 6b017a1 | 2016-03-02 02:56:29 +0000 | [diff] [blame] | 5719 | // FIXME: without the explicit `this` receiver below, MSVC errors out with |
| 5720 | // C2352 and C2512 (otherwise it isn't needed). |
| 5721 | |
Sanjoy Das | 97d19bd | 2016-03-09 01:51:02 +0000 | [diff] [blame] | 5722 | const SCEV *TrueStart = this->getConstant(StartPattern.TrueValue); |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5723 | const SCEV *TrueStep = this->getConstant(StepPattern.TrueValue); |
Sanjoy Das | 97d19bd | 2016-03-09 01:51:02 +0000 | [diff] [blame] | 5724 | const SCEV *FalseStart = this->getConstant(StartPattern.FalseValue); |
Sanjoy Das | d3488c6 | 2016-03-09 01:50:57 +0000 | [diff] [blame] | 5725 | const SCEV *FalseStep = this->getConstant(StepPattern.FalseValue); |
Sanjoy Das | 62a1c33 | 2016-03-02 02:15:42 +0000 | [diff] [blame] | 5726 | |
Sanjoy Das | 1168f93 | 2016-03-02 02:34:20 +0000 | [diff] [blame] | 5727 | ConstantRange TrueRange = |
Sanjoy Das | eca1b53 | 2016-03-02 02:44:08 +0000 | [diff] [blame] | 5728 | this->getRangeForAffineAR(TrueStart, TrueStep, MaxBECount, BitWidth); |
Sanjoy Das | 1168f93 | 2016-03-02 02:34:20 +0000 | [diff] [blame] | 5729 | ConstantRange FalseRange = |
Sanjoy Das | eca1b53 | 2016-03-02 02:44:08 +0000 | [diff] [blame] | 5730 | this->getRangeForAffineAR(FalseStart, FalseStep, MaxBECount, BitWidth); |
Sanjoy Das | bf73098 | 2016-03-02 00:57:54 +0000 | [diff] [blame] | 5731 | |
| 5732 | return TrueRange.unionWith(FalseRange); |
| 5733 | } |
| 5734 | |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5735 | SCEV::NoWrapFlags ScalarEvolution::getNoWrapFlagsFromUB(const Value *V) { |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 5736 | if (isa<ConstantExpr>(V)) return SCEV::FlagAnyWrap; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5737 | const BinaryOperator *BinOp = cast<BinaryOperator>(V); |
| 5738 | |
| 5739 | // Return early if there are no flags to propagate to the SCEV. |
| 5740 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
| 5741 | if (BinOp->hasNoUnsignedWrap()) |
| 5742 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNUW); |
| 5743 | if (BinOp->hasNoSignedWrap()) |
| 5744 | Flags = ScalarEvolution::setFlags(Flags, SCEV::FlagNSW); |
Sanjoy Das | dcd3a88 | 2016-03-02 04:52:22 +0000 | [diff] [blame] | 5745 | if (Flags == SCEV::FlagAnyWrap) |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5746 | return SCEV::FlagAnyWrap; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5747 | |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5748 | return isSCEVExprNeverPoison(BinOp) ? Flags : SCEV::FlagAnyWrap; |
| 5749 | } |
| 5750 | |
| 5751 | bool ScalarEvolution::isSCEVExprNeverPoison(const Instruction *I) { |
| 5752 | // Here we check that I is in the header of the innermost loop containing I, |
| 5753 | // since we only deal with instructions in the loop header. The actual loop we |
| 5754 | // need to check later will come from an add recurrence, but getting that |
| 5755 | // requires computing the SCEV of the operands, which can be expensive. This |
| 5756 | // check can be done cheaply to rule out some cases early. |
| 5757 | Loop *InnermostContainingLoop = LI.getLoopFor(I->getParent()); |
Sanjoy Das | dcd3a88 | 2016-03-02 04:52:22 +0000 | [diff] [blame] | 5758 | if (InnermostContainingLoop == nullptr || |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5759 | InnermostContainingLoop->getHeader() != I->getParent()) |
| 5760 | return false; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5761 | |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5762 | // Only proceed if we can prove that I does not yield poison. |
Sanjoy Das | 08989c7 | 2017-04-30 19:41:19 +0000 | [diff] [blame] | 5763 | if (!programUndefinedIfFullPoison(I)) |
| 5764 | return false; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5765 | |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5766 | // At this point we know that if I is executed, then it does not wrap |
| 5767 | // according to at least one of NSW or NUW. If I is not executed, then we do |
| 5768 | // not know if the calculation that I represents would wrap. Multiple |
| 5769 | // instructions can map to the same SCEV. If we apply NSW or NUW from I to |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5770 | // the SCEV, we must guarantee no wrapping for that SCEV also when it is |
| 5771 | // derived from other instructions that map to the same SCEV. We cannot make |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5772 | // that guarantee for cases where I is not executed. So we need to find the |
| 5773 | // loop that I is considered in relation to and prove that I is executed for |
| 5774 | // every iteration of that loop. That implies that the value that I |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5775 | // calculates does not wrap anywhere in the loop, so then we can apply the |
| 5776 | // flags to the SCEV. |
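//
// A minimal illustrative sketch (not from the original comment): given
//   %a = add nsw i32 %x, 1
//   %b = add i32 %x, 1
// both %a and %b map to the same SCEV (%x + 1). Tagging that SCEV with <nsw>
// because of %a would also claim no-signed-wrap on behalf of %b, which is
// only sound if %a is known to execute on every iteration of the relevant
// loop.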
| 5777 | // |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5778 | // We check isLoopInvariant to disambiguate in case we are adding recurrences |
| 5779 | // from different loops, so that we know which loop to prove that I is |
| 5780 | // executed in. |
| 5781 | for (unsigned OpIndex = 0; OpIndex < I->getNumOperands(); ++OpIndex) { |
Hans Wennborg | 3879035 | 2016-08-17 22:50:18 +0000 | [diff] [blame] | 5782 | // I could be an extractvalue from a call to an overflow intrinsic. |
| 5783 | // TODO: We can do better here in some cases. |
| 5784 | if (!isSCEVable(I->getOperand(OpIndex)->getType())) |
| 5785 | return false; |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5786 | const SCEV *Op = getSCEV(I->getOperand(OpIndex)); |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5787 | if (auto *AddRec = dyn_cast<SCEVAddRecExpr>(Op)) { |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5788 | bool AllOtherOpsLoopInvariant = true; |
| 5789 | for (unsigned OtherOpIndex = 0; OtherOpIndex < I->getNumOperands(); |
| 5790 | ++OtherOpIndex) { |
| 5791 | if (OtherOpIndex != OpIndex) { |
| 5792 | const SCEV *OtherOp = getSCEV(I->getOperand(OtherOpIndex)); |
| 5793 | if (!isLoopInvariant(OtherOp, AddRec->getLoop())) { |
| 5794 | AllOtherOpsLoopInvariant = false; |
| 5795 | break; |
| 5796 | } |
| 5797 | } |
| 5798 | } |
| 5799 | if (AllOtherOpsLoopInvariant && |
| 5800 | isGuaranteedToExecuteForEveryIteration(I, AddRec->getLoop())) |
| 5801 | return true; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5802 | } |
| 5803 | } |
Sanjoy Das | efdeb45 | 2016-04-22 05:38:54 +0000 | [diff] [blame] | 5804 | return false; |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5805 | } |
| 5806 | |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5807 | bool ScalarEvolution::isAddRecNeverPoison(const Instruction *I, const Loop *L) { |
| 5808 | // If we know that \c I can never be poison, period, then that's enough.
| 5809 | if (isSCEVExprNeverPoison(I)) |
| 5810 | return true; |
| 5811 | |
| 5812 | // For an add recurrence specifically, we assume that infinite loops without |
| 5813 | // side effects are undefined behavior, and then reason as follows: |
| 5814 | // |
| 5815 | // If the add recurrence is poison in any iteration, it is poison on all |
| 5816 | // future iterations (since incrementing poison yields poison). If the result |
| 5817 | // of the add recurrence is fed into the loop latch condition and the loop |
| 5818 | // does not contain any throws or exiting blocks other than the latch, we now |
| 5819 | // have the ability to "choose" whether the backedge is taken or not (by |
| 5820 | // choosing a sufficiently evil value for the poison feeding into the branch) |
| 5821 | // for every iteration including and after the one in which \p I first became |
| 5822 | // poison. There are two possibilities (let's call the iteration in which \p |
| 5823 | // I first became poison as K): |
| 5824 | // |
| 5825 | // 1. In the set of iterations including and after K, the loop body executes |
| 5826 | //       no side effects. In this case executing the backedge an infinite number
| 5827 | // of times will yield undefined behavior. |
| 5828 | // |
| 5829 | // 2. In the set of iterations including and after K, the loop body executes |
| 5830 | // at least one side effect. In this case, that specific instance of side |
| 5831 | // effect is control dependent on poison, which also yields undefined |
| 5832 | // behavior. |
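//
// Illustrative sketch of the kind of loop this reasoning applies to
// (hypothetical IR, not from the original comment):
//   loop:
//     %iv      = phi i32 [ 0, %entry ], [ %iv.next, %loop ]
//     %iv.next = add i32 %iv, %step      ; the post-inc add recurrence (\p I)
//     ...                                ; loop body
//     %cmp     = icmp slt i32 %iv.next, %n
//     br i1 %cmp, label %loop, label %exit
// If %iv.next were poison, %cmp and therefore the latch branch would be
// controlled by poison, landing us in case 1 or case 2 above.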
| 5833 | |
| 5834 | auto *ExitingBB = L->getExitingBlock(); |
| 5835 | auto *LatchBB = L->getLoopLatch(); |
| 5836 | if (!ExitingBB || !LatchBB || ExitingBB != LatchBB) |
| 5837 | return false; |
| 5838 | |
| 5839 | SmallPtrSet<const Instruction *, 16> Pushed; |
Sanjoy Das | a19edc4 | 2016-06-08 17:48:31 +0000 | [diff] [blame] | 5840 | SmallVector<const Instruction *, 8> PoisonStack; |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5841 | |
Sanjoy Das | a19edc4 | 2016-06-08 17:48:31 +0000 | [diff] [blame] | 5842 | // We start by assuming \c I, the post-inc add recurrence, is poison. Only |
| 5843 | // things that are known to be fully poison under that assumption go on the |
| 5844 | // PoisonStack. |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5845 | Pushed.insert(I); |
Sanjoy Das | a19edc4 | 2016-06-08 17:48:31 +0000 | [diff] [blame] | 5846 | PoisonStack.push_back(I); |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5847 | |
| 5848 | bool LatchControlDependentOnPoison = false; |
Sanjoy Das | 2401c98 | 2016-06-08 17:48:46 +0000 | [diff] [blame] | 5849 | while (!PoisonStack.empty() && !LatchControlDependentOnPoison) { |
Sanjoy Das | a19edc4 | 2016-06-08 17:48:31 +0000 | [diff] [blame] | 5850 | const Instruction *Poison = PoisonStack.pop_back_val(); |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5851 | |
Sanjoy Das | a19edc4 | 2016-06-08 17:48:31 +0000 | [diff] [blame] | 5852 | for (auto *PoisonUser : Poison->users()) { |
| 5853 | if (propagatesFullPoison(cast<Instruction>(PoisonUser))) { |
| 5854 | if (Pushed.insert(cast<Instruction>(PoisonUser)).second) |
| 5855 | PoisonStack.push_back(cast<Instruction>(PoisonUser)); |
| 5856 | } else if (auto *BI = dyn_cast<BranchInst>(PoisonUser)) { |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5857 | assert(BI->isConditional() && "Only possibility!"); |
| 5858 | if (BI->getParent() == LatchBB) { |
| 5859 | LatchControlDependentOnPoison = true; |
| 5860 | break; |
| 5861 | } |
| 5862 | } |
| 5863 | } |
| 5864 | } |
| 5865 | |
Sanjoy Das | 97cd7d5 | 2016-06-09 01:13:54 +0000 | [diff] [blame] | 5866 | return LatchControlDependentOnPoison && loopHasNoAbnormalExits(L); |
| 5867 | } |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5868 | |
Sanjoy Das | 5603fc0 | 2016-09-26 02:44:07 +0000 | [diff] [blame] | 5869 | ScalarEvolution::LoopProperties |
| 5870 | ScalarEvolution::getLoopProperties(const Loop *L) { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 5871 | using LoopProperties = ScalarEvolution::LoopProperties; |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 5872 | |
Sanjoy Das | 5603fc0 | 2016-09-26 02:44:07 +0000 | [diff] [blame] | 5873 | auto Itr = LoopPropertiesCache.find(L); |
| 5874 | if (Itr == LoopPropertiesCache.end()) { |
| 5875 | auto HasSideEffects = [](Instruction *I) { |
| 5876 | if (auto *SI = dyn_cast<StoreInst>(I)) |
| 5877 | return !SI->isSimple(); |
| 5878 | |
| 5879 | return I->mayHaveSideEffects(); |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 5880 | }; |
| 5881 | |
Sanjoy Das | 5603fc0 | 2016-09-26 02:44:07 +0000 | [diff] [blame] | 5882 | LoopProperties LP = {/* HasNoAbnormalExits */ true, |
| 5883 | /* HasNoSideEffects */ true};
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 5884 | |
Sanjoy Das | 5603fc0 | 2016-09-26 02:44:07 +0000 | [diff] [blame] | 5885 | for (auto *BB : L->getBlocks()) |
| 5886 | for (auto &I : *BB) { |
| 5887 | if (!isGuaranteedToTransferExecutionToSuccessor(&I)) |
| 5888 | LP.HasNoAbnormalExits = false; |
| 5889 | if (HasSideEffects(&I)) |
| 5890 | LP.HasNoSideEffects = false; |
| 5891 | if (!LP.HasNoAbnormalExits && !LP.HasNoSideEffects) |
| 5892 | break; // We're already as pessimistic as we can get. |
| 5893 | } |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 5894 | |
Sanjoy Das | 5603fc0 | 2016-09-26 02:44:07 +0000 | [diff] [blame] | 5895 | auto InsertPair = LoopPropertiesCache.insert({L, LP}); |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5896 | assert(InsertPair.second && "We just checked!"); |
| 5897 | Itr = InsertPair.first; |
| 5898 | } |
| 5899 | |
Sanjoy Das | 97cd7d5 | 2016-06-09 01:13:54 +0000 | [diff] [blame] | 5900 | return Itr->second; |
Sanjoy Das | 7e4a641 | 2016-05-29 00:32:17 +0000 | [diff] [blame] | 5901 | } |
| 5902 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 5903 | const SCEV *ScalarEvolution::createSCEV(Value *V) { |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 5904 | if (!isSCEVable(V->getType())) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5905 | return getUnknown(V); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 5906 | |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 5907 | if (Instruction *I = dyn_cast<Instruction>(V)) { |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 5908 | // Don't attempt to analyze instructions in blocks that aren't |
| 5909 | // reachable. Such instructions don't matter, and they aren't required |
| 5910 | // to obey basic rules for definitions dominating uses which this |
| 5911 | // analysis depends on. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 5912 | if (!DT.isReachableFromEntry(I->getParent())) |
Dan Gohman | 69451a0 | 2010-03-09 23:46:50 +0000 | [diff] [blame] | 5913 | return getUnknown(V); |
Sanjoy Das | 260ad4d | 2016-03-29 16:40:39 +0000 | [diff] [blame] | 5914 | } else if (ConstantInt *CI = dyn_cast<ConstantInt>(V)) |
Dan Gohman | f436bac | 2009-06-24 00:54:57 +0000 | [diff] [blame] | 5915 | return getConstant(CI); |
| 5916 | else if (isa<ConstantPointerNull>(V)) |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 5917 | return getZero(V->getType()); |
Dan Gohman | f161e06e | 2009-08-25 17:49:57 +0000 | [diff] [blame] | 5918 | else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) |
Sanjoy Das | 5ce3272 | 2016-04-08 00:48:30 +0000 | [diff] [blame] | 5919 | return GA->isInterposable() ? getUnknown(V) : getSCEV(GA->getAliasee()); |
Sanjoy Das | 260ad4d | 2016-03-29 16:40:39 +0000 | [diff] [blame] | 5920 | else if (!isa<ConstantExpr>(V)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 5921 | return getUnknown(V); |
Chris Lattner | a3e0bb4 | 2007-04-02 05:41:38 +0000 | [diff] [blame] | 5922 | |
Dan Gohman | 80ca01c | 2009-07-17 20:47:02 +0000 | [diff] [blame] | 5923 | Operator *U = cast<Operator>(V); |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 5924 | if (auto BO = MatchBinaryOp(U, DT)) { |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 5925 | switch (BO->Opcode) { |
| 5926 | case Instruction::Add: { |
| 5927 | // The simple thing to do would be to just call getSCEV on both operands |
| 5928 | // and call getAddExpr with the result. However if we're looking at a |
| 5929 | // bunch of things all added together, this can be quite inefficient, |
| 5930 | // because it leads to N-1 getAddExpr calls for N ultimate operands. |
| 5931 | // Instead, gather up all the operands and make a single getAddExpr call. |
| 5932 | // LLVM IR canonical form means we need only traverse the left operands. |
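// For example (illustrative): for ((a + b) + c) + d, the loop below walks
// the left spine and issues a single getAddExpr({d, c, b, a}) instead of
// three nested getAddExpr calls.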
| 5933 | SmallVector<const SCEV *, 4> AddOps; |
| 5934 | do { |
| 5935 | if (BO->Op) { |
| 5936 | if (auto *OpSCEV = getExistingSCEV(BO->Op)) { |
| 5937 | AddOps.push_back(OpSCEV); |
| 5938 | break; |
| 5939 | } |
Jingyue Wu | 42f1d67 | 2015-07-28 18:22:40 +0000 | [diff] [blame] | 5940 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 5941 | // If a NUW or NSW flag can be applied to the SCEV for this |
| 5942 | // addition, then compute the SCEV for this addition by itself |
| 5943 | // with a separate call to getAddExpr. We need to do that |
| 5944 | // instead of pushing the operands of the addition onto AddOps, |
| 5945 | // since the flags are only known to apply to this particular |
| 5946 | // addition - they may not apply to other additions that can be |
| 5947 | // formed with operands from AddOps. |
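// E.g. (illustrative): in ((a +nsw b) + c), the <nsw> applies to (a + b)
// only, so (a + b) is emitted as its own flagged add operand rather than
// being flattened into one flag-less add together with c.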
| 5948 | const SCEV *RHS = getSCEV(BO->RHS); |
| 5949 | SCEV::NoWrapFlags Flags = getNoWrapFlagsFromUB(BO->Op); |
| 5950 | if (Flags != SCEV::FlagAnyWrap) { |
| 5951 | const SCEV *LHS = getSCEV(BO->LHS); |
| 5952 | if (BO->Opcode == Instruction::Sub) |
| 5953 | AddOps.push_back(getMinusSCEV(LHS, RHS, Flags)); |
| 5954 | else |
| 5955 | AddOps.push_back(getAddExpr(LHS, RHS, Flags)); |
| 5956 | break; |
| 5957 | } |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 5958 | } |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 5959 | |
| 5960 | if (BO->Opcode == Instruction::Sub) |
| 5961 | AddOps.push_back(getNegativeSCEV(getSCEV(BO->RHS))); |
| 5962 | else |
| 5963 | AddOps.push_back(getSCEV(BO->RHS)); |
| 5964 | |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 5965 | auto NewBO = MatchBinaryOp(BO->LHS, DT); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 5966 | if (!NewBO || (NewBO->Opcode != Instruction::Add && |
| 5967 | NewBO->Opcode != Instruction::Sub)) { |
| 5968 | AddOps.push_back(getSCEV(BO->LHS)); |
| 5969 | break; |
| 5970 | } |
| 5971 | BO = NewBO; |
| 5972 | } while (true); |
| 5973 | |
| 5974 | return getAddExpr(AddOps); |
| 5975 | } |
| 5976 | |
| 5977 | case Instruction::Mul: { |
| 5978 | SmallVector<const SCEV *, 4> MulOps; |
| 5979 | do { |
| 5980 | if (BO->Op) { |
| 5981 | if (auto *OpSCEV = getExistingSCEV(BO->Op)) { |
| 5982 | MulOps.push_back(OpSCEV); |
| 5983 | break; |
| 5984 | } |
| 5985 | |
| 5986 | SCEV::NoWrapFlags Flags = getNoWrapFlagsFromUB(BO->Op); |
| 5987 | if (Flags != SCEV::FlagAnyWrap) { |
| 5988 | MulOps.push_back( |
| 5989 | getMulExpr(getSCEV(BO->LHS), getSCEV(BO->RHS), Flags)); |
| 5990 | break; |
| 5991 | } |
| 5992 | } |
| 5993 | |
| 5994 | MulOps.push_back(getSCEV(BO->RHS)); |
Sanjoy Das | f49ca52 | 2016-05-29 00:34:42 +0000 | [diff] [blame] | 5995 | auto NewBO = MatchBinaryOp(BO->LHS, DT); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 5996 | if (!NewBO || NewBO->Opcode != Instruction::Mul) { |
| 5997 | MulOps.push_back(getSCEV(BO->LHS)); |
| 5998 | break; |
| 5999 | } |
NAKAMURA Takumi | 940cd93 | 2016-07-04 01:26:21 +0000 | [diff] [blame] | 6000 | BO = NewBO; |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6001 | } while (true); |
| 6002 | |
| 6003 | return getMulExpr(MulOps); |
| 6004 | } |
| 6005 | case Instruction::UDiv: |
| 6006 | return getUDivExpr(getSCEV(BO->LHS), getSCEV(BO->RHS)); |
Alexandre Isoard | 405728f | 2017-09-01 14:59:59 +0000 | [diff] [blame] | 6007 | case Instruction::URem: |
| 6008 | return getURemExpr(getSCEV(BO->LHS), getSCEV(BO->RHS)); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6009 | case Instruction::Sub: { |
| 6010 | SCEV::NoWrapFlags Flags = SCEV::FlagAnyWrap; |
| 6011 | if (BO->Op) |
| 6012 | Flags = getNoWrapFlagsFromUB(BO->Op); |
| 6013 | return getMinusSCEV(getSCEV(BO->LHS), getSCEV(BO->RHS), Flags); |
| 6014 | } |
| 6015 | case Instruction::And: |
| 6016 | // For an expression like x&255 that merely masks off the high bits, |
| 6017 | // use zext(trunc(x)) as the SCEV expression. |
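// E.g. (illustrative), for an i32 value %x:
//   %x & 255  -->  (zext i8 (trunc i32 %x to i8) to i32)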
| 6018 | if (ConstantInt *CI = dyn_cast<ConstantInt>(BO->RHS)) { |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 6019 | if (CI->isZero()) |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6020 | return getSCEV(BO->RHS); |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 6021 | if (CI->isMinusOne()) |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6022 | return getSCEV(BO->LHS); |
| 6023 | const APInt &A = CI->getValue(); |
| 6024 | |
| 6025 | // Instcombine's ShrinkDemandedConstant may strip bits out of |
| 6026 | // constants, obscuring what would otherwise be a low-bits mask. |
| 6027 | // Use computeKnownBits to compute what ShrinkDemandedConstant |
| 6028 | // knew about to reconstruct a low-bits mask value. |
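// E.g. (illustrative): if the low bit of %x is known to be zero, instcombine
// may have turned (%x & 255) into (%x & 254); the known bits let this code
// still recognize the underlying masking structure.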
| 6029 | unsigned LZ = A.countLeadingZeros(); |
| 6030 | unsigned TZ = A.countTrailingZeros(); |
| 6031 | unsigned BitWidth = A.getBitWidth(); |
Craig Topper | b45eabc | 2017-04-26 16:39:58 +0000 | [diff] [blame] | 6032 | KnownBits Known(BitWidth); |
| 6033 | computeKnownBits(BO->LHS, Known, getDataLayout(), |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 6034 | 0, &AC, nullptr, &DT); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6035 | |
| 6036 | APInt EffectiveMask = |
| 6037 | APInt::getLowBitsSet(BitWidth, BitWidth - LZ - TZ).shl(TZ); |
Craig Topper | b45eabc | 2017-04-26 16:39:58 +0000 | [diff] [blame] | 6038 | if ((LZ != 0 || TZ != 0) && !((~A & ~Known.Zero) & EffectiveMask)) { |
Eli Friedman | f1f49c8 | 2017-01-18 23:56:42 +0000 | [diff] [blame] | 6039 | const SCEV *MulCount = getConstant(APInt::getOneBitSet(BitWidth, TZ)); |
| 6040 | const SCEV *LHS = getSCEV(BO->LHS); |
| 6041 | const SCEV *ShiftedLHS = nullptr; |
| 6042 | if (auto *LHSMul = dyn_cast<SCEVMulExpr>(LHS)) { |
| 6043 | if (auto *OpC = dyn_cast<SCEVConstant>(LHSMul->getOperand(0))) { |
| 6044 | // For an expression like (x * 8) & 8, simplify the multiply. |
| 6045 | unsigned MulZeros = OpC->getAPInt().countTrailingZeros(); |
| 6046 | unsigned GCD = std::min(MulZeros, TZ); |
| 6047 | APInt DivAmt = APInt::getOneBitSet(BitWidth, TZ - GCD); |
| 6048 | SmallVector<const SCEV*, 4> MulOps; |
| 6049 | MulOps.push_back(getConstant(OpC->getAPInt().lshr(GCD))); |
| 6050 | MulOps.append(LHSMul->op_begin() + 1, LHSMul->op_end()); |
| 6051 | auto *NewMul = getMulExpr(MulOps, LHSMul->getNoWrapFlags()); |
| 6052 | ShiftedLHS = getUDivExpr(NewMul, getConstant(DivAmt)); |
| 6053 | } |
| 6054 | } |
| 6055 | if (!ShiftedLHS) |
| 6056 | ShiftedLHS = getUDivExpr(LHS, MulCount); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6057 | return getMulExpr( |
| 6058 | getZeroExtendExpr( |
Eli Friedman | f1f49c8 | 2017-01-18 23:56:42 +0000 | [diff] [blame] | 6059 | getTruncateExpr(ShiftedLHS, |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6060 | IntegerType::get(getContext(), BitWidth - LZ - TZ)), |
| 6061 | BO->LHS->getType()), |
| 6062 | MulCount); |
| 6063 | } |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 6064 | } |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6065 | break; |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 6066 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6067 | case Instruction::Or: |
Eli Friedman | d0e6ae56 | 2017-04-20 23:59:05 +0000 | [diff] [blame] | 6068 | // If the RHS of the Or is a constant, we may have something like: |
| 6069 | // X*4+1 which got turned into X*4|1. Handle this as an Add so loop |
| 6070 | // optimizations will transparently handle this case. |
| 6071 | // |
| 6072 | // In order for this transformation to be safe, the LHS must be of the |
| 6073 | // form X*(2^n) and the Or constant must be less than 2^n. |
| 6074 | if (ConstantInt *CI = dyn_cast<ConstantInt>(BO->RHS)) { |
| 6075 | const SCEV *LHS = getSCEV(BO->LHS); |
| 6076 | const APInt &CIVal = CI->getValue(); |
| 6077 | if (GetMinTrailingZeros(LHS) >= |
| 6078 | (CIVal.getBitWidth() - CIVal.countLeadingZeros())) { |
| 6079 | // Build a plain add SCEV. |
| 6080 | const SCEV *S = getAddExpr(LHS, getSCEV(CI)); |
| 6081 | // If the LHS of the add was an addrec and it has no-wrap flags, |
| 6082 | // transfer the no-wrap flags, since an or won't introduce a wrap. |
| 6083 | if (const SCEVAddRecExpr *NewAR = dyn_cast<SCEVAddRecExpr>(S)) { |
| 6084 | const SCEVAddRecExpr *OldAR = cast<SCEVAddRecExpr>(LHS); |
| 6085 | const_cast<SCEVAddRecExpr *>(NewAR)->setNoWrapFlags( |
| 6086 | OldAR->getNoWrapFlags()); |
| 6087 | } |
| 6088 | return S; |
| 6089 | } |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6090 | } |
| 6091 | break; |
Dan Gohman | 6350296e | 2009-05-18 16:29:04 +0000 | [diff] [blame] | 6092 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6093 | case Instruction::Xor: |
| 6094 | if (ConstantInt *CI = dyn_cast<ConstantInt>(BO->RHS)) { |
| 6095 | // If the RHS of xor is -1, then this is a not operation. |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 6096 | if (CI->isMinusOne()) |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6097 | return getNotSCEV(getSCEV(BO->LHS)); |
Dan Gohman | eddf771 | 2009-06-18 00:00:20 +0000 | [diff] [blame] | 6098 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6099 | // Model xor(and(x, C), C) as and(~x, C), if C is a low-bits mask. |
| 6100 | // This is a variant of the check for xor with -1, and it handles |
| 6101 | // the case where instcombine has trimmed non-demanded bits out |
| 6102 | // of an xor with -1. |
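// E.g. (illustrative), for an i32 value %x with C = 255:
//   (%x & 255) ^ 255  ==  (~%x) & 255
// which is modeled as a zext of the complement of (trunc i32 %x to i8).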
| 6103 | if (auto *LBO = dyn_cast<BinaryOperator>(BO->LHS)) |
| 6104 | if (ConstantInt *LCI = dyn_cast<ConstantInt>(LBO->getOperand(1))) |
| 6105 | if (LBO->getOpcode() == Instruction::And && |
| 6106 | LCI->getValue() == CI->getValue()) |
| 6107 | if (const SCEVZeroExtendExpr *Z = |
| 6108 | dyn_cast<SCEVZeroExtendExpr>(getSCEV(BO->LHS))) { |
| 6109 | Type *UTy = BO->LHS->getType(); |
| 6110 | const SCEV *Z0 = Z->getOperand(); |
| 6111 | Type *Z0Ty = Z0->getType(); |
| 6112 | unsigned Z0TySize = getTypeSizeInBits(Z0Ty); |
Dan Gohman | eddf771 | 2009-06-18 00:00:20 +0000 | [diff] [blame] | 6113 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6114 | // If C is a low-bits mask, the zero extend is serving to |
| 6115 | // mask off the high bits. Complement the operand and |
| 6116 | // re-apply the zext. |
Craig Topper | d33ee1b | 2017-04-03 16:34:59 +0000 | [diff] [blame] | 6117 | if (CI->getValue().isMask(Z0TySize)) |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6118 | return getZeroExtendExpr(getNotSCEV(Z0), UTy); |
| 6119 | |
| 6120 | // If C is a single bit, it may be in the sign-bit position |
| 6121 | // before the zero-extend. In this case, represent the xor |
| 6122 | // using an add, which is equivalent, and re-apply the zext. |
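// E.g. (illustrative): for an i8 value %y, zext(%y) ^ 128 equals
// zext(%y + 128), because adding 128 modulo 256 flips exactly the i8
// sign bit.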
| 6123 | APInt Trunc = CI->getValue().trunc(Z0TySize); |
| 6124 | if (Trunc.zext(getTypeSizeInBits(UTy)) == CI->getValue() && |
Craig Topper | bcfd2d1 | 2017-04-20 16:56:25 +0000 | [diff] [blame] | 6125 | Trunc.isSignMask()) |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6126 | return getZeroExtendExpr(getAddExpr(Z0, getConstant(Trunc)), |
| 6127 | UTy); |
| 6128 | } |
| 6129 | } |
| 6130 | break; |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6131 | |
| 6132 | case Instruction::Shl: |
| 6133 | // Turn shift left of a constant amount into a multiply. |
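// E.g. (illustrative): shl %x, 3 becomes (%x * 8) in SCEV form.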
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6134 | if (ConstantInt *SA = dyn_cast<ConstantInt>(BO->RHS)) { |
| 6135 | uint32_t BitWidth = cast<IntegerType>(SA->getType())->getBitWidth(); |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 6136 | |
| 6137 | // If the shift count is not less than the bitwidth, the result of |
| 6138 | // the shift is undefined. Don't try to analyze it, because the |
| 6139 | // resolution chosen here may differ from the resolution chosen in |
| 6140 | // other parts of the compiler. |
| 6141 | if (SA->getValue().uge(BitWidth)) |
| 6142 | break; |
| 6143 | |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 6144 | // It is currently not resolved how to interpret NSW for left |
| 6145 | // shift by BitWidth - 1, so we avoid applying flags in that |
| 6146 | // case. Remove this check (or this comment) once the situation |
| 6147 | // is resolved. See |
| 6148 | // http://lists.llvm.org/pipermail/llvm-dev/2015-April/084195.html |
| 6149 | // and http://reviews.llvm.org/D8890 . |
| 6150 | auto Flags = SCEV::FlagAnyWrap; |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6151 | if (BO->Op && SA->getValue().ult(BitWidth - 1)) |
| 6152 | Flags = getNoWrapFlagsFromUB(BO->Op); |
Bjarke Hammersholt Roune | 9791ed4 | 2015-08-14 22:45:26 +0000 | [diff] [blame] | 6153 | |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 6154 | Constant *X = ConstantInt::get(getContext(), |
Benjamin Kramer | fc3ea6f | 2013-07-11 16:05:50 +0000 | [diff] [blame] | 6155 | APInt::getOneBitSet(BitWidth, SA->getZExtValue())); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6156 | return getMulExpr(getSCEV(BO->LHS), getSCEV(X), Flags); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6157 | } |
| 6158 | break; |
| 6159 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 6160 | case Instruction::AShr: { |
Zhaoshi Zheng | e3c9070 | 2017-03-23 18:06:09 +0000 | [diff] [blame] | 6161 | // AShr X, C, where C is a constant. |
| 6162 | ConstantInt *CI = dyn_cast<ConstantInt>(BO->RHS); |
| 6163 | if (!CI) |
| 6164 | break; |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 6165 | |
Zhaoshi Zheng | e3c9070 | 2017-03-23 18:06:09 +0000 | [diff] [blame] | 6166 | Type *OuterTy = BO->LHS->getType(); |
| 6167 | uint64_t BitWidth = getTypeSizeInBits(OuterTy); |
| 6168 | // If the shift count is not less than the bitwidth, the result of |
| 6169 | // the shift is undefined. Don't try to analyze it, because the |
| 6170 | // resolution chosen here may differ from the resolution chosen in |
| 6171 | // other parts of the compiler. |
| 6172 | if (CI->getValue().uge(BitWidth)) |
| 6173 | break; |
Dan Gohman | acd700a | 2010-04-22 01:35:11 +0000 | [diff] [blame] | 6174 | |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 6175 | if (CI->isZero()) |
Zhaoshi Zheng | e3c9070 | 2017-03-23 18:06:09 +0000 | [diff] [blame] | 6176 | return getSCEV(BO->LHS); // shift by zero --> noop |
| 6177 | |
| 6178 | uint64_t AShrAmt = CI->getZExtValue(); |
| 6179 | Type *TruncTy = IntegerType::get(getContext(), BitWidth - AShrAmt); |
| 6180 | |
| 6181 | Operator *L = dyn_cast<Operator>(BO->LHS); |
| 6182 | if (L && L->getOpcode() == Instruction::Shl) { |
| 6183 | // X = Shl A, n |
| 6184 | // Y = AShr X, m |
| 6185 | // Both n and m are constant. |
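// E.g. (illustrative), on i32: (ashr (shl %A, 24), 24) is the classic
// sext-inreg pattern and becomes sext(trunc %A to i8); with n = 4, m = 1
// the result is sext((trunc %A to i31) * 8).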
| 6186 | |
| 6187 | const SCEV *ShlOp0SCEV = getSCEV(L->getOperand(0)); |
| 6188 | if (L->getOperand(1) == BO->RHS) |
| 6189 | // For a two-shift sext-inreg, i.e. n = m, |
| 6190 | // use sext(trunc(x)) as the SCEV expression. |
| 6191 | return getSignExtendExpr( |
| 6192 | getTruncateExpr(ShlOp0SCEV, TruncTy), OuterTy); |
| 6193 | |
| 6194 | ConstantInt *ShlAmtCI = dyn_cast<ConstantInt>(L->getOperand(1)); |
| 6195 | if (ShlAmtCI && ShlAmtCI->getValue().ult(BitWidth)) { |
| 6196 | uint64_t ShlAmt = ShlAmtCI->getZExtValue(); |
| 6197 | if (ShlAmt > AShrAmt) { |
| 6198 | // When n > m, use sext(mul(trunc(x), 2^(n-m))) as the SCEV
| 6199 | // expression. We already checked that ShlAmt < BitWidth, so
| 6200 | // the multiplier, 1 << (ShlAmt - AShrAmt), fits into TruncTy as
| 6201 | // ShlAmt - AShrAmt < BitWidth - AShrAmt.
| 6202 | APInt Mul = APInt::getOneBitSet(BitWidth - AShrAmt, |
| 6203 | ShlAmt - AShrAmt); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6204 | return getSignExtendExpr( |
Zhaoshi Zheng | e3c9070 | 2017-03-23 18:06:09 +0000 | [diff] [blame] | 6205 | getMulExpr(getTruncateExpr(ShlOp0SCEV, TruncTy), |
| 6206 | getConstant(Mul)), OuterTy); |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6207 | } |
Zhaoshi Zheng | e3c9070 | 2017-03-23 18:06:09 +0000 | [diff] [blame] | 6208 | } |
| 6209 | } |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6210 | break; |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 6211 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 6212 | } |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6213 | } |
Nick Lewycky | f5c547d | 2008-07-07 06:15:49 +0000 | [diff] [blame] | 6214 | |
Sanjoy Das | 2381fcd | 2016-03-29 16:40:44 +0000 | [diff] [blame] | 6215 | switch (U->getOpcode()) { |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6216 | case Instruction::Trunc: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6217 | return getTruncateExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6218 | |
| 6219 | case Instruction::ZExt: |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6220 | return getZeroExtendExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6221 | |
| 6222 | case Instruction::SExt: |
Amara Emerson | 56dca4e3 | 2017-08-04 20:19:46 +0000 | [diff] [blame] | 6223 | if (auto BO = MatchBinaryOp(U->getOperand(0), DT)) { |
| 6224 | // The NSW flag of a subtract does not always survive the conversion to |
| 6225 | // A + (-1)*B. By pushing sign extension onto its operands we are much |
| 6226 | // more likely to preserve NSW and allow later AddRec optimisations. |
| 6227 | // |
| 6228 | // NOTE: This is effectively duplicating this logic from getSignExtend: |
| 6229 | // sext((A + B + ...)<nsw>) --> (sext(A) + sext(B) + ...)<nsw> |
| 6230 | // but by that point the NSW information has potentially been lost. |
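// E.g. (illustrative): sext (sub nsw i32 %a, %b) to i64 becomes
// (sext %a) - (sext %b) built with FlagNSW, rather than
// sext(%a + (-1) * %b), where the <nsw> would already have been lost.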
| 6231 | if (BO->Opcode == Instruction::Sub && BO->IsNSW) { |
| 6232 | Type *Ty = U->getType(); |
| 6233 | auto *V1 = getSignExtendExpr(getSCEV(BO->LHS), Ty); |
| 6234 | auto *V2 = getSignExtendExpr(getSCEV(BO->RHS), Ty); |
| 6235 | return getMinusSCEV(V1, V2, SCEV::FlagNSW); |
| 6236 | } |
| 6237 | } |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6238 | return getSignExtendExpr(getSCEV(U->getOperand(0)), U->getType()); |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6239 | |
| 6240 | case Instruction::BitCast: |
| 6241 | // BitCasts are no-op casts so we just eliminate the cast. |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 6242 | if (isSCEVable(U->getType()) && isSCEVable(U->getOperand(0)->getType())) |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6243 | return getSCEV(U->getOperand(0)); |
| 6244 | break; |
| 6245 | |
Dan Gohman | e5e1b7b | 2010-02-01 18:27:38 +0000 | [diff] [blame] | 6246 | // It's tempting to handle inttoptr and ptrtoint as no-ops, however this can |
| 6247 | // lead to pointer expressions which cannot safely be expanded to GEPs, |
| 6248 | // because ScalarEvolution doesn't respect the GEP aliasing rules when |
| 6249 | // simplifying integer expressions. |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 6250 | |
Dan Gohman | ee750d1 | 2009-05-08 20:26:55 +0000 | [diff] [blame] | 6251 | case Instruction::GetElementPtr: |
Dan Gohman | b256ccf | 2009-12-18 02:09:29 +0000 | [diff] [blame] | 6252 | return createNodeForGEP(cast<GEPOperator>(U)); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 6253 | |
Dan Gohman | 05e8973 | 2008-06-22 19:56:46 +0000 | [diff] [blame] | 6254 | case Instruction::PHI: |
| 6255 | return createNodeForPHI(cast<PHINode>(U)); |
| 6256 | |
| 6257 | case Instruction::Select: |
Sanjoy Das | d067134 | 2015-10-02 19:39:59 +0000 | [diff] [blame] | 6258 | // U can also be a select constant expr, which we let fall through. Since
| 6259 | // createNodeForSelect only works for a condition that is an `ICmpInst`, and |
| 6260 | // constant expressions cannot have instructions as operands, we'd have |
| 6261 | // returned getUnknown for a select constant expression anyway.
| 6262 | if (isa<Instruction>(U)) |
Sanjoy Das | 55015d2 | 2015-10-02 23:09:44 +0000 | [diff] [blame] | 6263 | return createNodeForSelectOrPHI(cast<Instruction>(U), U->getOperand(0), |
| 6264 | U->getOperand(1), U->getOperand(2)); |
Hal Finkel | e186deb | 2016-07-11 02:48:23 +0000 | [diff] [blame] | 6265 | break; |
| 6266 | |
| 6267 | case Instruction::Call: |
| 6268 | case Instruction::Invoke: |
| 6269 | if (Value *RV = CallSite(U).getReturnedArgOperand()) |
| 6270 | return getSCEV(RV); |
| 6271 | break; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6272 | } |
| 6273 | |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 6274 | return getUnknown(V); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6275 | } |
| 6276 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6277 | //===----------------------------------------------------------------------===// |
| 6278 | // Iteration Count Computation Code |
| 6279 | // |
| 6280 | |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 6281 | static unsigned getConstantTripCount(const SCEVConstant *ExitCount) { |
| 6282 | if (!ExitCount) |
| 6283 | return 0; |
| 6284 | |
| 6285 | ConstantInt *ExitConst = ExitCount->getValue(); |
| 6286 | |
| 6287 | // Guard against huge trip counts. |
| 6288 | if (ExitConst->getValue().getActiveBits() > 32) |
| 6289 | return 0; |
| 6290 | |
| 6291 | // In case of integer overflow, this returns 0, which is correct. |
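// E.g. (illustrative): an exit count of 0xFFFFFFFF (2^32 - 1 backedges)
// gives (unsigned)0xFFFFFFFF + 1 == 0, i.e. "no usable constant trip count".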
| 6292 | return ((unsigned)ExitConst->getZExtValue()) + 1; |
| 6293 | } |
| 6294 | |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6295 | unsigned ScalarEvolution::getSmallConstantTripCount(const Loop *L) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 6296 | if (BasicBlock *ExitingBB = L->getExitingBlock()) |
| 6297 | return getSmallConstantTripCount(L, ExitingBB); |
| 6298 | |
| 6299 | // No trip count information for multiple exits. |
| 6300 | return 0; |
| 6301 | } |
| 6302 | |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6303 | unsigned ScalarEvolution::getSmallConstantTripCount(const Loop *L, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6304 | BasicBlock *ExitingBlock) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 6305 | assert(ExitingBlock && "Must pass a non-null exiting block!"); |
| 6306 | assert(L->isLoopExiting(ExitingBlock) && |
| 6307 | "Exiting block must actually branch out of the loop!"); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6308 | const SCEVConstant *ExitCount = |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6309 | dyn_cast<SCEVConstant>(getExitCount(L, ExitingBlock)); |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 6310 | return getConstantTripCount(ExitCount); |
| 6311 | } |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6312 | |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6313 | unsigned ScalarEvolution::getSmallConstantMaxTripCount(const Loop *L) { |
Haicheng Wu | 1ef17e9 | 2016-10-12 21:29:38 +0000 | [diff] [blame] | 6314 | const auto *MaxExitCount = |
| 6315 | dyn_cast<SCEVConstant>(getMaxBackedgeTakenCount(L)); |
| 6316 | return getConstantTripCount(MaxExitCount); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6317 | } |
| 6318 | |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6319 | unsigned ScalarEvolution::getSmallConstantTripMultiple(const Loop *L) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 6320 | if (BasicBlock *ExitingBB = L->getExitingBlock()) |
| 6321 | return getSmallConstantTripMultiple(L, ExitingBB); |
| 6322 | |
| 6323 | // No trip multiple information for multiple exits. |
| 6324 | return 0; |
| 6325 | } |
| 6326 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6327 | /// Returns the largest constant divisor of the trip count of this loop as a |
| 6328 | /// normal unsigned value, if possible. This means that the actual trip count is |
| 6329 | /// always a multiple of the returned value (don't forget the trip count could |
| 6330 | /// very well be zero as well!). |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6331 | /// |
| 6332 | /// Returns 1 if the trip count is unknown or not guaranteed to be a
| 6333 | /// multiple of a constant (which is also the case if the trip count is simply
| 6334 | /// constant; use getSmallConstantTripCount for that case). It will also
| 6335 | /// return 1 if the trip count is very large (>= 2^32).
Andrew Trick | e81211f | 2012-01-11 06:52:55 +0000 | [diff] [blame] | 6336 | /// |
| 6337 | /// As explained in the comments for getSmallConstantTripCount, this assumes |
| 6338 | /// that control exits the loop via ExitingBlock. |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6339 | unsigned |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6340 | ScalarEvolution::getSmallConstantTripMultiple(const Loop *L, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6341 | BasicBlock *ExitingBlock) { |
Chandler Carruth | 6666c27 | 2014-10-11 00:12:11 +0000 | [diff] [blame] | 6342 | assert(ExitingBlock && "Must pass a non-null exiting block!"); |
| 6343 | assert(L->isLoopExiting(ExitingBlock) && |
| 6344 | "Exiting block must actually branch out of the loop!"); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6345 | const SCEV *ExitCount = getExitCount(L, ExitingBlock); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6346 | if (ExitCount == getCouldNotCompute()) |
| 6347 | return 1; |
| 6348 | |
| 6349 | // Get the trip count from the BE count by adding 1. |
Eli Friedman | b1578d3 | 2017-03-20 20:25:46 +0000 | [diff] [blame] | 6350 | const SCEV *TCExpr = getAddExpr(ExitCount, getOne(ExitCount->getType())); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6351 | |
Eli Friedman | b1578d3 | 2017-03-20 20:25:46 +0000 | [diff] [blame] | 6352 | const SCEVConstant *TC = dyn_cast<SCEVConstant>(TCExpr); |
| 6353 | if (!TC) |
| 6354 | // Attempt to factor more general cases. Returns the greatest power of |
| 6355 | // two divisor. If overflow happens, the trip count expression is still |
| 6356 | // divisible by the greatest power of 2 divisor returned. |
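// E.g. (illustrative): a trip count of (4 * %n) has two known trailing
// zero bits, so the reported trip multiple is 4.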
| 6357 | return 1U << std::min((uint32_t)31, GetMinTrailingZeros(TCExpr)); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6358 | |
Eli Friedman | b1578d3 | 2017-03-20 20:25:46 +0000 | [diff] [blame] | 6359 | ConstantInt *Result = TC->getValue(); |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6360 | |
Hal Finkel | 30bd934 | 2012-10-24 19:46:44 +0000 | [diff] [blame] | 6361 | // Guard against huge trip counts (this requires checking |
| 6362 | // for zero to handle the case where the trip count == -1 and the |
| 6363 | // addition wraps). |
| 6364 | if (!Result || Result->getValue().getActiveBits() > 32 || |
| 6365 | Result->getValue().getActiveBits() == 0) |
Andrew Trick | 2b6860f | 2011-08-11 23:36:16 +0000 | [diff] [blame] | 6366 | return 1; |
| 6367 | |
| 6368 | return (unsigned)Result->getZExtValue(); |
| 6369 | } |
| 6370 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6371 | /// Get the expression for the number of loop iterations for which this loop is |
| 6372 | /// guaranteed not to exit via ExitingBlock. Otherwise return |
| 6373 | /// SCEVCouldNotCompute. |
Eli Friedman | f7b060b | 2017-03-17 22:19:52 +0000 | [diff] [blame] | 6374 | const SCEV *ScalarEvolution::getExitCount(const Loop *L, |
| 6375 | BasicBlock *ExitingBlock) { |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 6376 | return getBackedgeTakenInfo(L).getExact(ExitingBlock, this); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6377 | } |
| 6378 | |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6379 | const SCEV * |
| 6380 | ScalarEvolution::getPredicatedBackedgeTakenCount(const Loop *L, |
| 6381 | SCEVUnionPredicate &Preds) { |
| 6382 | return getPredicatedBackedgeTakenInfo(L).getExact(this, &Preds); |
| 6383 | } |
| 6384 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6385 | const SCEV *ScalarEvolution::getBackedgeTakenCount(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6386 | return getBackedgeTakenInfo(L).getExact(this); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6387 | } |
| 6388 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6389 | /// Similar to getBackedgeTakenCount, except it returns the least SCEV value that is
| 6390 | /// known never to be less than the actual backedge taken count. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6391 | const SCEV *ScalarEvolution::getMaxBackedgeTakenCount(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6392 | return getBackedgeTakenInfo(L).getMax(this); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6393 | } |
| 6394 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6395 | bool ScalarEvolution::isBackedgeTakenCountMaxOrZero(const Loop *L) { |
| 6396 | return getBackedgeTakenInfo(L).isMaxOrZero(this); |
| 6397 | } |
| 6398 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6399 | /// Push PHI nodes in the header of the given loop onto the given Worklist. |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6400 | static void |
| 6401 | PushLoopPHIs(const Loop *L, SmallVectorImpl<Instruction *> &Worklist) { |
| 6402 | BasicBlock *Header = L->getHeader(); |
| 6403 | |
| 6404 | // Push all Loop-header PHIs onto the Worklist stack. |
| 6405 | for (BasicBlock::iterator I = Header->begin(); |
| 6406 | PHINode *PN = dyn_cast<PHINode>(I); ++I) |
| 6407 | Worklist.push_back(PN); |
| 6408 | } |
| 6409 | |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6410 | const ScalarEvolution::BackedgeTakenInfo & |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6411 | ScalarEvolution::getPredicatedBackedgeTakenInfo(const Loop *L) { |
| 6412 | auto &BTI = getBackedgeTakenInfo(L); |
| 6413 | if (BTI.hasFullInfo()) |
| 6414 | return BTI; |
| 6415 | |
| 6416 | auto Pair = PredicatedBackedgeTakenCounts.insert({L, BackedgeTakenInfo()}); |
| 6417 | |
| 6418 | if (!Pair.second) |
| 6419 | return Pair.first->second; |
| 6420 | |
| 6421 | BackedgeTakenInfo Result = |
| 6422 | computeBackedgeTakenCount(L, /*AllowPredicates=*/true); |
| 6423 | |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6424 | return PredicatedBackedgeTakenCounts.find(L)->second = std::move(Result); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6425 | } |
| 6426 | |
| 6427 | const ScalarEvolution::BackedgeTakenInfo & |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6428 | ScalarEvolution::getBackedgeTakenInfo(const Loop *L) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6429 | // Initially insert an invalid entry for this loop. If the insertion |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 6430 | // succeeds, proceed to actually compute a backedge-taken count and |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 6431 | // update the value. The temporary CouldNotCompute value tells SCEV |
| 6432 | // code elsewhere that it shouldn't attempt to request a new |
| 6433 | // backedge-taken count, which could result in infinite recursion. |
Dan Gohman | 0daf687 | 2011-05-09 18:44:09 +0000 | [diff] [blame] | 6434 | std::pair<DenseMap<const Loop *, BackedgeTakenInfo>::iterator, bool> Pair = |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 6435 | BackedgeTakenCounts.insert({L, BackedgeTakenInfo()}); |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6436 | if (!Pair.second) |
| 6437 | return Pair.first->second; |
Dan Gohman | 7646637 | 2009-04-27 20:16:15 +0000 | [diff] [blame] | 6438 | |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 6439 | // computeBackedgeTakenCount may allocate memory for its result. Inserting it |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6440 | // into the BackedgeTakenCounts map transfers ownership. Otherwise, the result |
| 6441 | // must be cleared in this scope. |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 6442 | BackedgeTakenInfo Result = computeBackedgeTakenCount(L); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6443 | |
| 6444 | if (Result.getExact(this) != getCouldNotCompute()) { |
| 6445 | assert(isLoopInvariant(Result.getExact(this), L) && |
| 6446 | isLoopInvariant(Result.getMax(this), L) && |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6447 | "Computed backedge-taken count isn't loop invariant for loop!"); |
| 6448 | ++NumTripCountsComputed; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6449 | } |
| 6450 | else if (Result.getMax(this) == getCouldNotCompute() && |
| 6451 | isa<PHINode>(L->getHeader()->begin())) { |
| 6452 | // Only count loops that have phi nodes as not being computable. |
| 6453 | ++NumTripCountsNotComputed; |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6454 | } |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6455 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6456 | // Now that we know more about the trip count for this loop, forget any |
| 6457 | // existing SCEV values for PHI nodes in this loop since they are only |
| 6458 | // conservative estimates made without the benefit of trip count |
| 6459 | // information. This is similar to the code in forgetLoop, except that |
| 6460 | // it handles SCEVUnknown PHI nodes specially. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6461 | if (Result.hasAnyInfo()) { |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6462 | SmallVector<Instruction *, 16> Worklist; |
| 6463 | PushLoopPHIs(L, Worklist); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6464 | |
Sanjoy Das | adf3751 | 2017-12-04 19:22:01 +0000 | [diff] [blame] | 6465 | SmallPtrSet<Instruction *, 8> Discovered; |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6466 | while (!Worklist.empty()) { |
| 6467 | Instruction *I = Worklist.pop_back_val(); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6468 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6469 | ValueExprMapType::iterator It = |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 6470 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6471 | if (It != ValueExprMap.end()) { |
| 6472 | const SCEV *Old = It->second; |
Dan Gohman | 761065e | 2010-11-17 02:44:44 +0000 | [diff] [blame] | 6473 | |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6474 | // SCEVUnknown for a PHI either means that it has an unrecognized |
| 6475 | // structure, or it's a PHI that's in the process of being computed
| 6476 | // by createNodeForPHI. In the former case, additional loop trip |
| 6477 | // count information isn't going to change anything. In the latter
| 6478 | // case, createNodeForPHI will perform the necessary updates on its |
| 6479 | // own when it gets to that point. |
| 6480 | if (!isa<PHINode>(I) || !isa<SCEVUnknown>(Old)) { |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 6481 | eraseValueFromMap(It->first); |
Sanjoy Das | 7e36337 | 2017-12-04 19:22:00 +0000 | [diff] [blame] | 6482 | forgetMemoizedResults(Old); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6483 | } |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6484 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 6485 | ConstantEvolutionLoopExitValue.erase(PN); |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6486 | } |
Chris Lattner | a337f5e | 2011-01-09 02:16:18 +0000 | [diff] [blame] | 6487 | |
Sanjoy Das | 7e36337 | 2017-12-04 19:22:00 +0000 | [diff] [blame] | 6488 | // Since we don't need to invalidate anything for correctness and we're |
| 6489 | // only invalidating to make SCEV's results more precise, we get to stop |
| 6490 | // early to avoid invalidating too much. This is especially important in |
| 6491 | // cases like: |
| 6492 | // |
| 6493 | // %v = f(pn0, pn1) // pn0 and pn1 used through some other phi node |
| 6494 | // loop0: |
| 6495 | // %pn0 = phi |
| 6496 | // ... |
| 6497 | // loop1: |
| 6498 | // %pn1 = phi |
| 6499 | // ... |
| 6500 | // |
| 6501 | // where both loop0 and loop1's backedge taken count uses the SCEV |
| 6502 | // expression for %v. If we don't have the early stop below then in cases |
| 6503 | // like the above, getBackedgeTakenInfo(loop1) will clear out the trip |
| 6504 | // count for loop0 and getBackedgeTakenInfo(loop0) will clear out the trip |
| 6505 | // count for loop1, effectively nullifying SCEV's trip count cache. |
| 6506 | for (auto *U : I->users()) |
| 6507 | if (auto *I = dyn_cast<Instruction>(U)) { |
| 6508 | auto *LoopForUser = LI.getLoopFor(I->getParent()); |
Sanjoy Das | adf3751 | 2017-12-04 19:22:01 +0000 | [diff] [blame] | 6509 | if (LoopForUser && L->contains(LoopForUser) && |
| 6510 | Discovered.insert(I).second) |
Sanjoy Das | 7e36337 | 2017-12-04 19:22:00 +0000 | [diff] [blame] | 6511 | Worklist.push_back(I); |
| 6512 | } |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6513 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6514 | } |
Dan Gohman | 6acd95b | 2011-04-25 22:48:29 +0000 | [diff] [blame] | 6515 | |
| 6516 | // Re-lookup the insert position, since the call to |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 6517 | // computeBackedgeTakenCount above could result in a |
Dan Gohman | 6acd95b | 2011-04-25 22:48:29 +0000 | [diff] [blame] | 6518 | // recursive call to getBackedgeTakenInfo (on a different
| 6519 | // loop), which would invalidate the iterator computed |
| 6520 | // earlier. |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6521 | return BackedgeTakenCounts.find(L)->second = std::move(Result); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6522 | } |
| 6523 | |
Dan Gohman | 880c92a | 2009-10-31 15:04:55 +0000 | [diff] [blame] | 6524 | void ScalarEvolution::forgetLoop(const Loop *L) { |
| 6525 | // Drop any stored trip count value. |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6526 | auto RemoveLoopFromBackedgeMap = |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6527 | [](DenseMap<const Loop *, BackedgeTakenInfo> &Map, const Loop *L) { |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6528 | auto BTCPos = Map.find(L); |
| 6529 | if (BTCPos != Map.end()) { |
| 6530 | BTCPos->second.clear(); |
| 6531 | Map.erase(BTCPos); |
| 6532 | } |
| 6533 | }; |
| 6534 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6535 | SmallVector<const Loop *, 16> LoopWorklist(1, L); |
| 6536 | SmallVector<Instruction *, 32> Worklist; |
| 6537 | SmallPtrSet<Instruction *, 16> Visited; |
Dan Gohman | f150572 | 2009-05-02 17:43:35 +0000 | [diff] [blame] | 6538 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6539 | // Iterate over all the loops and sub-loops to drop SCEV information. |
| 6540 | while (!LoopWorklist.empty()) { |
| 6541 | auto *CurrL = LoopWorklist.pop_back_val(); |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 6542 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6543 | RemoveLoopFromBackedgeMap(BackedgeTakenCounts, CurrL); |
| 6544 | RemoveLoopFromBackedgeMap(PredicatedBackedgeTakenCounts, CurrL); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6545 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6546 | // Drop information about predicated SCEV rewrites for this loop. |
| 6547 | for (auto I = PredicatedSCEVRewrites.begin(); |
| 6548 | I != PredicatedSCEVRewrites.end();) { |
| 6549 | std::pair<const SCEV *, const Loop *> Entry = I->first; |
| 6550 | if (Entry.second == CurrL) |
| 6551 | PredicatedSCEVRewrites.erase(I++); |
| 6552 | else |
| 6553 | ++I; |
Dan Gohman | dc19104 | 2009-07-08 19:23:34 +0000 | [diff] [blame] | 6554 | } |
| 6555 | |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 6556 | auto LoopUsersItr = LoopUsers.find(CurrL); |
| 6557 | if (LoopUsersItr != LoopUsers.end()) { |
Sanjoy Das | 3a5e252 | 2017-10-17 01:03:56 +0000 | [diff] [blame] | 6558 | for (auto *S : LoopUsersItr->second) |
| 6559 | forgetMemoizedResults(S); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 6560 | LoopUsers.erase(LoopUsersItr); |
| 6561 | } |
| 6562 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6563 | // Drop information about expressions based on loop-header PHIs. |
| 6564 | PushLoopPHIs(CurrL, Worklist); |
| 6565 | |
| 6566 | while (!Worklist.empty()) { |
| 6567 | Instruction *I = Worklist.pop_back_val(); |
| 6568 | if (!Visited.insert(I).second) |
| 6569 | continue; |
| 6570 | |
| 6571 | ValueExprMapType::iterator It = |
| 6572 | ValueExprMap.find_as(static_cast<Value *>(I)); |
| 6573 | if (It != ValueExprMap.end()) { |
| 6574 | eraseValueFromMap(It->first); |
| 6575 | forgetMemoizedResults(It->second); |
| 6576 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 6577 | ConstantEvolutionLoopExitValue.erase(PN); |
| 6578 | } |
| 6579 | |
| 6580 | PushDefUseChildren(I, Worklist); |
| 6581 | } |
| 6582 | |
Marcello Maggioni | ce90060 | 2017-09-11 15:44:20 +0000 | [diff] [blame] | 6583 | LoopPropertiesCache.erase(CurrL); |
| 6584 | // Forget all contained loops too, to avoid dangling entries in the |
| 6585 | // ValuesAtScopes map. |
| 6586 | LoopWorklist.append(CurrL->begin(), CurrL->end()); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 6587 | } |
Dan Gohman | 4330034 | 2009-02-17 20:49:49 +0000 | [diff] [blame] | 6588 | } |
| 6589 | |
Eric Christopher | ef6d593 | 2010-07-29 01:25:38 +0000 | [diff] [blame] | 6590 | void ScalarEvolution::forgetValue(Value *V) { |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 6591 | Instruction *I = dyn_cast<Instruction>(V); |
| 6592 | if (!I) return; |
| 6593 | |
| 6594 | // Drop information about expressions based on loop-header PHIs. |
| 6595 | SmallVector<Instruction *, 16> Worklist; |
| 6596 | Worklist.push_back(I); |
| 6597 | |
| 6598 | SmallPtrSet<Instruction *, 8> Visited; |
| 6599 | while (!Worklist.empty()) { |
| 6600 | I = Worklist.pop_back_val(); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 6601 | if (!Visited.insert(I).second) |
| 6602 | continue; |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 6603 | |
Benjamin Kramer | e2ef47c | 2012-06-30 22:37:15 +0000 | [diff] [blame] | 6604 | ValueExprMapType::iterator It = |
| 6605 | ValueExprMap.find_as(static_cast<Value *>(I)); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 6606 | if (It != ValueExprMap.end()) { |
Wei Mi | 785858c | 2016-08-09 20:37:50 +0000 | [diff] [blame] | 6607 | eraseValueFromMap(It->first); |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 6608 | forgetMemoizedResults(It->second); |
Dale Johannesen | 1d6827a | 2010-02-19 07:14:22 +0000 | [diff] [blame] | 6609 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 6610 | ConstantEvolutionLoopExitValue.erase(PN); |
| 6611 | } |
| 6612 | |
| 6613 | PushDefUseChildren(I, Worklist); |
| 6614 | } |
| 6615 | } |
| 6616 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6617 | /// Get the exact loop backedge taken count considering all loop exits. A |
| 6618 | /// computable result can only be returned for loops with a single exit. |
| 6619 | /// Returning the minimum taken count among all exits is incorrect because one |
| 6620 | /// of the loop's exit limits may have been skipped. howFarToZero assumes that
| 6621 | /// the limit of each loop test is never skipped. This is a valid assumption as |
| 6622 | /// long as the loop exits via that test. For precise results, it is the |
| 6623 | /// caller's responsibility to specify the relevant loop exit using |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 6624 | /// getExact(ExitingBlock, SE). |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6625 | const SCEV * |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6626 | ScalarEvolution::BackedgeTakenInfo::getExact(ScalarEvolution *SE, |
| 6627 | SCEVUnionPredicate *Preds) const { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6628 | // If any exits were not computable, the loop is not computable. |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6629 | if (!isComplete() || ExitNotTaken.empty()) |
| 6630 | return SE->getCouldNotCompute(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6631 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 6632 | const SCEV *BECount = nullptr; |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6633 | for (auto &ENT : ExitNotTaken) { |
| 6634 | assert(ENT.ExactNotTaken != SE->getCouldNotCompute() && "bad exit SCEV"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6635 | |
| 6636 | if (!BECount) |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6637 | BECount = ENT.ExactNotTaken; |
| 6638 | else if (BECount != ENT.ExactNotTaken) |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 6639 | return SE->getCouldNotCompute(); |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6640 | if (Preds && !ENT.hasAlwaysTruePredicate()) |
| 6641 | Preds->add(ENT.Predicate.get()); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6642 | |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6643 | assert((Preds || ENT.hasAlwaysTruePredicate()) && |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6644 | "Predicate should be always true!"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6645 | } |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6646 | |
Andrew Trick | bbb226a | 2011-09-02 21:20:46 +0000 | [diff] [blame] | 6647 | assert(BECount && "Invalid not taken count for loop exit"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6648 | return BECount; |
| 6649 | } |
| 6650 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6651 | /// Get the exact not taken count for this loop exit. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6652 | const SCEV * |
Andrew Trick | 77c5542 | 2011-08-02 04:23:35 +0000 | [diff] [blame] | 6653 | ScalarEvolution::BackedgeTakenInfo::getExact(BasicBlock *ExitingBlock, |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6654 | ScalarEvolution *SE) const { |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6655 | for (auto &ENT : ExitNotTaken) |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6656 | if (ENT.ExitingBlock == ExitingBlock && ENT.hasAlwaysTruePredicate()) |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6657 | return ENT.ExactNotTaken; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6658 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6659 | return SE->getCouldNotCompute(); |
| 6660 | } |
| 6661 | |
| 6662 | /// getMax - Get the max backedge taken count for the loop. |
| 6663 | const SCEV * |
| 6664 | ScalarEvolution::BackedgeTakenInfo::getMax(ScalarEvolution *SE) const { |
Sanjoy Das | 7326861 | 2016-09-26 01:10:22 +0000 | [diff] [blame] | 6665 | auto PredicateNotAlwaysTrue = [](const ExitNotTakenInfo &ENT) { |
| 6666 | return !ENT.hasAlwaysTruePredicate(); |
| 6667 | }; |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6668 | |
Sanjoy Das | 7326861 | 2016-09-26 01:10:22 +0000 | [diff] [blame] | 6669 | if (any_of(ExitNotTaken, PredicateNotAlwaysTrue) || !getMax()) |
| 6670 | return SE->getCouldNotCompute(); |
| 6671 | |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6672 | assert((isa<SCEVCouldNotCompute>(getMax()) || isa<SCEVConstant>(getMax())) && |
| 6673 | "No point in having a non-constant max backedge taken count!"); |
Sanjoy Das | 7326861 | 2016-09-26 01:10:22 +0000 | [diff] [blame] | 6674 | return getMax(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6675 | } |
| 6676 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6677 | bool ScalarEvolution::BackedgeTakenInfo::isMaxOrZero(ScalarEvolution *SE) const { |
| 6678 | auto PredicateNotAlwaysTrue = [](const ExitNotTakenInfo &ENT) { |
| 6679 | return !ENT.hasAlwaysTruePredicate(); |
| 6680 | }; |
| 6681 | return MaxOrZero && !any_of(ExitNotTaken, PredicateNotAlwaysTrue); |
| 6682 | } |
| 6683 | |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 6684 | bool ScalarEvolution::BackedgeTakenInfo::hasOperand(const SCEV *S, |
| 6685 | ScalarEvolution *SE) const { |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6686 | if (getMax() && getMax() != SE->getCouldNotCompute() && |
| 6687 | SE->hasOperand(getMax(), S)) |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 6688 | return true; |
| 6689 | |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6690 | for (auto &ENT : ExitNotTaken) |
| 6691 | if (ENT.ExactNotTaken != SE->getCouldNotCompute() && |
| 6692 | SE->hasOperand(ENT.ExactNotTaken, S)) |
Silviu Baranga | a393baf | 2016-04-06 14:06:32 +0000 | [diff] [blame] | 6693 | return true; |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6694 | |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 6695 | return false; |
| 6696 | } |
| 6697 | |
Sanjoy Das | f6f6fb9 | 2017-05-15 04:22:09 +0000 | [diff] [blame] | 6698 | ScalarEvolution::ExitLimit::ExitLimit(const SCEV *E) |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 6699 | : ExactNotTaken(E), MaxNotTaken(E) { |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6700 | assert((isa<SCEVCouldNotCompute>(MaxNotTaken) || |
| 6701 | isa<SCEVConstant>(MaxNotTaken)) && |
| 6702 | "No point in having a non-constant max backedge taken count!"); |
| 6703 | } |
Sanjoy Das | f6f6fb9 | 2017-05-15 04:22:09 +0000 | [diff] [blame] | 6704 | |
| 6705 | ScalarEvolution::ExitLimit::ExitLimit( |
| 6706 | const SCEV *E, const SCEV *M, bool MaxOrZero, |
| 6707 | ArrayRef<const SmallPtrSetImpl<const SCEVPredicate *> *> PredSetList) |
| 6708 | : ExactNotTaken(E), MaxNotTaken(M), MaxOrZero(MaxOrZero) { |
| 6709 | assert((isa<SCEVCouldNotCompute>(ExactNotTaken) || |
| 6710 | !isa<SCEVCouldNotCompute>(MaxNotTaken)) && |
| 6711 | "Exact is not allowed to be less precise than Max"); |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6712 | assert((isa<SCEVCouldNotCompute>(MaxNotTaken) || |
| 6713 | isa<SCEVConstant>(MaxNotTaken)) && |
| 6714 | "No point in having a non-constant max backedge taken count!"); |
Sanjoy Das | f6f6fb9 | 2017-05-15 04:22:09 +0000 | [diff] [blame] | 6715 | for (auto *PredSet : PredSetList) |
| 6716 | for (auto *P : *PredSet) |
| 6717 | addPredicate(P); |
| 6718 | } |
| 6719 | |
| 6720 | ScalarEvolution::ExitLimit::ExitLimit( |
| 6721 | const SCEV *E, const SCEV *M, bool MaxOrZero, |
| 6722 | const SmallPtrSetImpl<const SCEVPredicate *> &PredSet) |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6723 | : ExitLimit(E, M, MaxOrZero, {&PredSet}) { |
| 6724 | assert((isa<SCEVCouldNotCompute>(MaxNotTaken) || |
| 6725 | isa<SCEVConstant>(MaxNotTaken)) && |
| 6726 | "No point in having a non-constant max backedge taken count!"); |
| 6727 | } |
Sanjoy Das | f6f6fb9 | 2017-05-15 04:22:09 +0000 | [diff] [blame] | 6728 | |
| 6729 | ScalarEvolution::ExitLimit::ExitLimit(const SCEV *E, const SCEV *M, |
| 6730 | bool MaxOrZero) |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6731 | : ExitLimit(E, M, MaxOrZero, None) { |
| 6732 | assert((isa<SCEVCouldNotCompute>(MaxNotTaken) || |
| 6733 | isa<SCEVConstant>(MaxNotTaken)) && |
| 6734 | "No point in having a non-constant max backedge taken count!"); |
| 6735 | } |
Sanjoy Das | f6f6fb9 | 2017-05-15 04:22:09 +0000 | [diff] [blame] | 6736 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6737 | /// Allocate memory for BackedgeTakenInfo and copy the not-taken count of each |
| 6738 | /// computable exit into a persistent ExitNotTakenInfo array. |
| 6739 | ScalarEvolution::BackedgeTakenInfo::BackedgeTakenInfo( |
Sanjoy Das | 5c4869b | 2016-09-26 01:10:27 +0000 | [diff] [blame] | 6740 | SmallVectorImpl<ScalarEvolution::BackedgeTakenInfo::EdgeExitInfo> |
| 6741 | &&ExitCounts, |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6742 | bool Complete, const SCEV *MaxCount, bool MaxOrZero) |
| 6743 | : MaxAndComplete(MaxCount, Complete), MaxOrZero(MaxOrZero) { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 6744 | using EdgeExitInfo = ScalarEvolution::BackedgeTakenInfo::EdgeExitInfo; |
| 6745 | |
Sanjoy Das | e935c77 | 2016-09-25 23:12:08 +0000 | [diff] [blame] | 6746 | ExitNotTaken.reserve(ExitCounts.size()); |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6747 | std::transform( |
| 6748 | ExitCounts.begin(), ExitCounts.end(), std::back_inserter(ExitNotTaken), |
Sanjoy Das | 6b76cdf | 2016-09-26 01:10:25 +0000 | [diff] [blame] | 6749 | [&](const EdgeExitInfo &EEI) { |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6750 | BasicBlock *ExitBB = EEI.first; |
| 6751 | const ExitLimit &EL = EEI.second; |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 6752 | if (EL.Predicates.empty()) |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6753 | return ExitNotTakenInfo(ExitBB, EL.ExactNotTaken, nullptr); |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 6754 | |
| 6755 | std::unique_ptr<SCEVUnionPredicate> Predicate(new SCEVUnionPredicate); |
| 6756 | for (auto *Pred : EL.Predicates) |
| 6757 | Predicate->add(Pred); |
| 6758 | |
| 6759 | return ExitNotTakenInfo(ExitBB, EL.ExactNotTaken, std::move(Predicate)); |
Sanjoy Das | c9bbf56 | 2016-09-25 23:12:04 +0000 | [diff] [blame] | 6760 | }); |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 6761 | assert((isa<SCEVCouldNotCompute>(MaxCount) || isa<SCEVConstant>(MaxCount)) && |
| 6762 | "No point in having a non-constant max backedge taken count!"); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6763 | } |
| 6764 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6765 | /// Invalidate this result and free the ExitNotTakenInfo array. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6766 | void ScalarEvolution::BackedgeTakenInfo::clear() { |
Sanjoy Das | d1eb62a | 2016-09-25 23:12:00 +0000 | [diff] [blame] | 6767 | ExitNotTaken.clear(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6768 | } |
| 6769 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 6770 | /// Compute the number of times the backedge of the specified loop will execute. |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 6771 | ScalarEvolution::BackedgeTakenInfo |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6772 | ScalarEvolution::computeBackedgeTakenCount(const Loop *L, |
| 6773 | bool AllowPredicates) { |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 6774 | SmallVector<BasicBlock *, 8> ExitingBlocks; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6775 | L->getExitingBlocks(ExitingBlocks); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 6776 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 6777 | using EdgeExitInfo = ScalarEvolution::BackedgeTakenInfo::EdgeExitInfo; |
Sanjoy Das | 6b76cdf | 2016-09-26 01:10:25 +0000 | [diff] [blame] | 6778 | |
| 6779 | SmallVector<EdgeExitInfo, 4> ExitCounts; |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6780 | bool CouldComputeBECount = true; |
Andrew Trick | ee5aa7f | 2014-01-15 06:42:11 +0000 | [diff] [blame] | 6781 | BasicBlock *Latch = L->getLoopLatch(); // may be NULL. |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6782 | const SCEV *MustExitMaxBECount = nullptr; |
| 6783 | const SCEV *MayExitMaxBECount = nullptr; |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6784 | bool MustExitMaxOrZero = false; |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6785 | |
| 6786 | // Compute the ExitLimit for each loop exit. Use this to populate ExitCounts |
| 6787 | // and compute maxBECount. |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6788 | // Do a union of all the predicates here. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6789 | for (unsigned i = 0, e = ExitingBlocks.size(); i != e; ++i) { |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6790 | BasicBlock *ExitBB = ExitingBlocks[i]; |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6791 | ExitLimit EL = computeExitLimit(L, ExitBB, AllowPredicates); |
| 6792 | |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 6793 | assert((AllowPredicates || EL.Predicates.empty()) && |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6794 | "Predicated exit limit when predicates are not allowed!"); |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6795 | |
| 6796 | // 1. For each exit that can be computed, add an entry to ExitCounts. |
| 6797 | // CouldComputeBECount is true only if all exits can be computed. |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6798 | if (EL.ExactNotTaken == getCouldNotCompute()) |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6799 | // We couldn't compute an exact value for this exit, so |
Dan Gohman | 8885b37 | 2009-06-22 21:10:22 +0000 | [diff] [blame] | 6800 | // we won't be able to compute an exact value for the loop. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6801 | CouldComputeBECount = false; |
| 6802 | else |
Sanjoy Das | bdd9710 | 2016-09-25 23:11:55 +0000 | [diff] [blame] | 6803 | ExitCounts.emplace_back(ExitBB, EL); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6804 | |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6805 | // 2. Derive the loop's MaxBECount from each exit's max number of |
| 6806 | // non-exiting iterations. Partition the loop exits into two kinds: |
| 6807 | // LoopMustExits and LoopMayExits. |
| 6808 | // |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6809 | // If the exit dominates the loop latch, it is a LoopMustExit; otherwise it
| 6810 | // is a LoopMayExit. If any computable LoopMustExit is found, then |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6811 | // MaxBECount is the minimum EL.MaxNotTaken of computable |
| 6812 | // LoopMustExits. Otherwise, MaxBECount is conservatively the maximum |
| 6813 | // EL.MaxNotTaken, where CouldNotCompute is considered greater than any |
| 6814 | // computable EL.MaxNotTaken. |
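// For illustration (a sketch of one common case, not of every case handled
// here): in a loop whose latch test `i != n` dominates the latch (a
// LoopMustExit) and which also has a data-dependent early exit (a
// LoopMayExit), MaxBECount comes from the `i != n` exit alone, because an
// extra exit can only make the loop terminate earlier, never later.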
| 6815 | if (EL.MaxNotTaken != getCouldNotCompute() && Latch && |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 6816 | DT.dominates(ExitBB, Latch)) { |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6817 | if (!MustExitMaxBECount) { |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6818 | MustExitMaxBECount = EL.MaxNotTaken; |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6819 | MustExitMaxOrZero = EL.MaxOrZero; |
| 6820 | } else { |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6821 | MustExitMaxBECount = |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6822 | getUMinFromMismatchedTypes(MustExitMaxBECount, EL.MaxNotTaken); |
Andrew Trick | e255359 | 2014-05-22 00:37:03 +0000 | [diff] [blame] | 6823 | } |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6824 | } else if (MayExitMaxBECount != getCouldNotCompute()) { |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6825 | if (!MayExitMaxBECount || EL.MaxNotTaken == getCouldNotCompute()) |
| 6826 | MayExitMaxBECount = EL.MaxNotTaken; |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6827 | else { |
| 6828 | MayExitMaxBECount = |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6829 | getUMaxFromMismatchedTypes(MayExitMaxBECount, EL.MaxNotTaken); |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6830 | } |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 6831 | } |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6832 | } |
Andrew Trick | 839e30b | 2014-05-23 19:47:13 +0000 | [diff] [blame] | 6833 | const SCEV *MaxBECount = MustExitMaxBECount ? MustExitMaxBECount : |
| 6834 | (MayExitMaxBECount ? MayExitMaxBECount : getCouldNotCompute()); |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6835 | // The loop backedge is taken either the maximum number of times or zero
| 6836 | // times only if there is a single exit whose own limit is "max or zero".
| 6837 | bool MaxOrZero = (MustExitMaxOrZero && ExitingBlocks.size() == 1); |
Sanjoy Das | 5c4869b | 2016-09-26 01:10:27 +0000 | [diff] [blame] | 6838 | return BackedgeTakenInfo(std::move(ExitCounts), CouldComputeBECount, |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 6839 | MaxBECount, MaxOrZero); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6840 | } |
| 6841 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 6842 | ScalarEvolution::ExitLimit |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6843 | ScalarEvolution::computeExitLimit(const Loop *L, BasicBlock *ExitingBlock, |
Max Kazantsev | 2cb3653 | 2017-08-03 08:41:30 +0000 | [diff] [blame] | 6844 | bool AllowPredicates) { |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 6845 | // Okay, we've chosen an exiting block. See what condition causes us to exit |
| 6846 | // at this block and remember the exit block and whether all other targets |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6847 | // lead to the loop header. |
| 6848 | bool MustExecuteLoopHeader = true; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 6849 | BasicBlock *Exit = nullptr; |
Sanjoy Das | 0ff0787 | 2016-01-19 20:53:46 +0000 | [diff] [blame] | 6850 | for (auto *SBB : successors(ExitingBlock)) |
| 6851 | if (!L->contains(SBB)) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6852 | if (Exit) // Multiple exit successors. |
| 6853 | return getCouldNotCompute(); |
Sanjoy Das | 0ff0787 | 2016-01-19 20:53:46 +0000 | [diff] [blame] | 6854 | Exit = SBB; |
| 6855 | } else if (SBB != L->getHeader()) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6856 | MustExecuteLoopHeader = false; |
| 6857 | } |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 6858 | |
Chris Lattner | 1895485 | 2007-01-07 02:24:26 +0000 | [diff] [blame] | 6859 | // At this point, we know we have a conditional branch that determines whether |
| 6860 | // the loop is exited. However, we don't know if the branch is executed each |
| 6861 | // time through the loop. If not, then the execution count of the branch will |
| 6862 | // not be equal to the trip count of the loop. |
| 6863 | // |
| 6864 | // Currently we check for this by checking to see if the Exit branch goes to |
| 6865 | // the loop header. If so, we know it will always execute the same number of |
Chris Lattner | 5a55476 | 2007-01-14 01:24:47 +0000 | [diff] [blame] | 6866 | // times as the loop. We also handle the case where the exit block *is* the |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6867 | // loop header. This is common for un-rotated loops. |
| 6868 | // |
| 6869 | // If both of those tests fail, walk up the unique predecessor chain to the |
| 6870 | // header, stopping if there is an edge that doesn't exit the loop. If the |
| 6871 | // header is reached, the execution count of the branch will be equal to the |
| 6872 | // trip count of the loop. |
| 6873 | // |
| 6874 | // More extensive analysis could be done to handle more cases here. |
| 6875 | // |
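// For example, in an un-rotated `while (i < n)` loop the compare and branch
// sit in the header, so the exiting block *is* the header and the test is
// evaluated exactly once per iteration; a rotated loop typically exits from
// the latch, whose only in-loop successor is the header, so
// MustExecuteLoopHeader already holds and the walk below is skipped.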
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6876 | if (!MustExecuteLoopHeader && ExitingBlock != L->getHeader()) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6877 | // The simple checks failed, try climbing the unique predecessor chain |
| 6878 | // up to the header. |
| 6879 | bool Ok = false; |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6880 | for (BasicBlock *BB = ExitingBlock; BB; ) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6881 | BasicBlock *Pred = BB->getUniquePredecessor(); |
| 6882 | if (!Pred) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6883 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6884 | TerminatorInst *PredTerm = Pred->getTerminator(); |
Pete Cooper | ebcd748 | 2015-08-06 20:22:46 +0000 | [diff] [blame] | 6885 | for (const BasicBlock *PredSucc : PredTerm->successors()) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6886 | if (PredSucc == BB) |
| 6887 | continue; |
| 6888 | // If the predecessor has a successor that isn't BB and isn't |
| 6889 | // outside the loop, assume the worst. |
| 6890 | if (L->contains(PredSucc)) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6891 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6892 | } |
| 6893 | if (Pred == L->getHeader()) { |
| 6894 | Ok = true; |
| 6895 | break; |
| 6896 | } |
| 6897 | BB = Pred; |
| 6898 | } |
| 6899 | if (!Ok) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6900 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6901 | } |
| 6902 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6903 | bool IsOnlyExit = (L->getExitingBlock() != nullptr); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6904 | TerminatorInst *Term = ExitingBlock->getTerminator(); |
| 6905 | if (BranchInst *BI = dyn_cast<BranchInst>(Term)) { |
| 6906 | assert(BI->isConditional() && "If unconditional, it can't be in loop!"); |
| 6907 | // Proceed to the next level to examine the exit condition expression. |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 6908 | return computeExitLimitFromCond( |
| 6909 | L, BI->getCondition(), BI->getSuccessor(0), BI->getSuccessor(1), |
| 6910 | /*ControlsExit=*/IsOnlyExit, AllowPredicates); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6911 | } |
| 6912 | |
| 6913 | if (SwitchInst *SI = dyn_cast<SwitchInst>(Term)) |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 6914 | return computeExitLimitFromSingleExitSwitch(L, SI, Exit, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 6915 | /*ControlsExit=*/IsOnlyExit); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 6916 | |
| 6917 | return getCouldNotCompute(); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6918 | } |
| 6919 | |
Sanjoy Das | bdbc493 | 2017-04-24 00:09:46 +0000 | [diff] [blame] | 6920 | ScalarEvolution::ExitLimit ScalarEvolution::computeExitLimitFromCond( |
| 6921 | const Loop *L, Value *ExitCond, BasicBlock *TBB, BasicBlock *FBB, |
| 6922 | bool ControlsExit, bool AllowPredicates) { |
| 6923 | ScalarEvolution::ExitLimitCacheTy Cache(L, TBB, FBB, AllowPredicates); |
| 6924 | return computeExitLimitFromCondCached(Cache, L, ExitCond, TBB, FBB, |
| 6925 | ControlsExit, AllowPredicates); |
| 6926 | } |
| 6927 | |
| 6928 | Optional<ScalarEvolution::ExitLimit> |
| 6929 | ScalarEvolution::ExitLimitCache::find(const Loop *L, Value *ExitCond, |
| 6930 | BasicBlock *TBB, BasicBlock *FBB, |
| 6931 | bool ControlsExit, bool AllowPredicates) { |
Sanjoy Das | 25972aa | 2017-04-24 00:46:40 +0000 | [diff] [blame] | 6932 | (void)this->L; |
| 6933 | (void)this->TBB; |
| 6934 | (void)this->FBB; |
| 6935 | (void)this->AllowPredicates; |
| 6936 | |
Sanjoy Das | bdbc493 | 2017-04-24 00:09:46 +0000 | [diff] [blame] | 6937 | assert(this->L == L && this->TBB == TBB && this->FBB == FBB && |
| 6938 | this->AllowPredicates == AllowPredicates && |
| 6939 | "Variance in assumed invariant key components!"); |
| 6940 | auto Itr = TripCountMap.find({ExitCond, ControlsExit}); |
| 6941 | if (Itr == TripCountMap.end()) |
| 6942 | return None; |
| 6943 | return Itr->second; |
| 6944 | } |
| 6945 | |
| 6946 | void ScalarEvolution::ExitLimitCache::insert(const Loop *L, Value *ExitCond, |
| 6947 | BasicBlock *TBB, BasicBlock *FBB, |
| 6948 | bool ControlsExit, |
| 6949 | bool AllowPredicates, |
| 6950 | const ExitLimit &EL) { |
| 6951 | assert(this->L == L && this->TBB == TBB && this->FBB == FBB && |
| 6952 | this->AllowPredicates == AllowPredicates && |
| 6953 | "Variance in assumed invariant key components!"); |
| 6954 | |
| 6955 | auto InsertResult = TripCountMap.insert({{ExitCond, ControlsExit}, EL}); |
| 6956 | assert(InsertResult.second && "Expected successful insertion!"); |
Sanjoy Das | 25972aa | 2017-04-24 00:46:40 +0000 | [diff] [blame] | 6957 | (void)InsertResult; |
Sanjoy Das | bdbc493 | 2017-04-24 00:09:46 +0000 | [diff] [blame] | 6958 | } |
| 6959 | |
| 6960 | ScalarEvolution::ExitLimit ScalarEvolution::computeExitLimitFromCondCached( |
| 6961 | ExitLimitCacheTy &Cache, const Loop *L, Value *ExitCond, BasicBlock *TBB, |
| 6962 | BasicBlock *FBB, bool ControlsExit, bool AllowPredicates) { |
| 6963 | |
| 6964 | if (auto MaybeEL = |
| 6965 | Cache.find(L, ExitCond, TBB, FBB, ControlsExit, AllowPredicates)) |
| 6966 | return *MaybeEL; |
| 6967 | |
| 6968 | ExitLimit EL = computeExitLimitFromCondImpl(Cache, L, ExitCond, TBB, FBB, |
| 6969 | ControlsExit, AllowPredicates); |
| 6970 | Cache.insert(L, ExitCond, TBB, FBB, ControlsExit, AllowPredicates, EL); |
| 6971 | return EL; |
| 6972 | } |
| 6973 | |
| 6974 | ScalarEvolution::ExitLimit ScalarEvolution::computeExitLimitFromCondImpl( |
| 6975 | ExitLimitCacheTy &Cache, const Loop *L, Value *ExitCond, BasicBlock *TBB, |
| 6976 | BasicBlock *FBB, bool ControlsExit, bool AllowPredicates) { |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 6977 | // Check if the controlling expression for this loop is an And or Or. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6978 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(ExitCond)) { |
| 6979 | if (BO->getOpcode() == Instruction::And) { |
| 6980 | // Recurse on the operands of the and. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 6981 | bool EitherMayExit = L->contains(TBB); |
Sanjoy Das | bdbc493 | 2017-04-24 00:09:46 +0000 | [diff] [blame] | 6982 | ExitLimit EL0 = computeExitLimitFromCondCached( |
| 6983 | Cache, L, BO->getOperand(0), TBB, FBB, ControlsExit && !EitherMayExit, |
| 6984 | AllowPredicates); |
| 6985 | ExitLimit EL1 = computeExitLimitFromCondCached( |
| 6986 | Cache, L, BO->getOperand(1), TBB, FBB, ControlsExit && !EitherMayExit, |
| 6987 | AllowPredicates); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 6988 | const SCEV *BECount = getCouldNotCompute(); |
| 6989 | const SCEV *MaxBECount = getCouldNotCompute(); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 6990 | if (EitherMayExit) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 6991 | // Both conditions must be true for the loop to continue executing. |
| 6992 | // Choose the less conservative count. |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6993 | if (EL0.ExactNotTaken == getCouldNotCompute() || |
| 6994 | EL1.ExactNotTaken == getCouldNotCompute()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 6995 | BECount = getCouldNotCompute(); |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 6996 | else |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 6997 | BECount = |
| 6998 | getUMinFromMismatchedTypes(EL0.ExactNotTaken, EL1.ExactNotTaken); |
| 6999 | if (EL0.MaxNotTaken == getCouldNotCompute()) |
| 7000 | MaxBECount = EL1.MaxNotTaken; |
| 7001 | else if (EL1.MaxNotTaken == getCouldNotCompute()) |
| 7002 | MaxBECount = EL0.MaxNotTaken; |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 7003 | else |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7004 | MaxBECount = |
| 7005 | getUMinFromMismatchedTypes(EL0.MaxNotTaken, EL1.MaxNotTaken); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7006 | } else { |
Dan Gohman | f7495f2 | 2010-08-11 00:12:36 +0000 | [diff] [blame] | 7007 | // Both conditions must be true at the same time for the loop to exit. |
| 7008 | // For now, be conservative. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7009 | assert(L->contains(FBB) && "Loop block has no successor in loop!"); |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7010 | if (EL0.MaxNotTaken == EL1.MaxNotTaken) |
| 7011 | MaxBECount = EL0.MaxNotTaken; |
| 7012 | if (EL0.ExactNotTaken == EL1.ExactNotTaken) |
| 7013 | BECount = EL0.ExactNotTaken; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7014 | } |
| 7015 | |
Sanjoy Das | 29a4b5d | 2016-01-19 20:53:51 +0000 | [diff] [blame] | 7016 | // There are cases (e.g. PR26207) where computeExitLimitFromCond is able |
| 7017 | // to be more aggressive when computing BECount than when computing |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7018 | // MaxBECount. In these cases it is possible for EL0.ExactNotTaken and |
| 7019 | // EL1.ExactNotTaken to match, but for EL0.MaxNotTaken and EL1.MaxNotTaken |
| 7020 | // to not. |
Sanjoy Das | 29a4b5d | 2016-01-19 20:53:51 +0000 | [diff] [blame] | 7021 | if (isa<SCEVCouldNotCompute>(MaxBECount) && |
| 7022 | !isa<SCEVCouldNotCompute>(BECount)) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 7023 | MaxBECount = getConstant(getUnsignedRangeMax(BECount)); |
Sanjoy Das | 29a4b5d | 2016-01-19 20:53:51 +0000 | [diff] [blame] | 7024 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 7025 | return ExitLimit(BECount, MaxBECount, false, |
| 7026 | {&EL0.Predicates, &EL1.Predicates}); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7027 | } |
| 7028 | if (BO->getOpcode() == Instruction::Or) { |
| 7029 | // Recurse on the operands of the or. |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 7030 | bool EitherMayExit = L->contains(FBB); |
Sanjoy Das | bdbc493 | 2017-04-24 00:09:46 +0000 | [diff] [blame] | 7031 | ExitLimit EL0 = computeExitLimitFromCondCached( |
| 7032 | Cache, L, BO->getOperand(0), TBB, FBB, ControlsExit && !EitherMayExit, |
| 7033 | AllowPredicates); |
| 7034 | ExitLimit EL1 = computeExitLimitFromCondCached( |
| 7035 | Cache, L, BO->getOperand(1), TBB, FBB, ControlsExit && !EitherMayExit, |
| 7036 | AllowPredicates); |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7037 | const SCEV *BECount = getCouldNotCompute(); |
| 7038 | const SCEV *MaxBECount = getCouldNotCompute(); |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 7039 | if (EitherMayExit) { |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7040 | // Both conditions must be false for the loop to continue executing. |
| 7041 | // Choose the less conservative count. |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7042 | if (EL0.ExactNotTaken == getCouldNotCompute() || |
| 7043 | EL1.ExactNotTaken == getCouldNotCompute()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7044 | BECount = getCouldNotCompute(); |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 7045 | else |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7046 | BECount = |
| 7047 | getUMinFromMismatchedTypes(EL0.ExactNotTaken, EL1.ExactNotTaken); |
| 7048 | if (EL0.MaxNotTaken == getCouldNotCompute()) |
| 7049 | MaxBECount = EL1.MaxNotTaken; |
| 7050 | else if (EL1.MaxNotTaken == getCouldNotCompute()) |
| 7051 | MaxBECount = EL0.MaxNotTaken; |
Dan Gohman | ed62738 | 2009-06-22 15:09:28 +0000 | [diff] [blame] | 7052 | else |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7053 | MaxBECount = |
| 7054 | getUMinFromMismatchedTypes(EL0.MaxNotTaken, EL1.MaxNotTaken); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7055 | } else { |
Dan Gohman | f7495f2 | 2010-08-11 00:12:36 +0000 | [diff] [blame] | 7056 | // Both conditions must be false at the same time for the loop to exit. |
| 7057 | // For now, be conservative. |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7058 | assert(L->contains(TBB) && "Loop block has no successor in loop!"); |
Sanjoy Das | 89eea6b | 2016-09-25 23:11:57 +0000 | [diff] [blame] | 7059 | if (EL0.MaxNotTaken == EL1.MaxNotTaken) |
| 7060 | MaxBECount = EL0.MaxNotTaken; |
| 7061 | if (EL0.ExactNotTaken == EL1.ExactNotTaken) |
| 7062 | BECount = EL0.ExactNotTaken; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7063 | } |
| 7064 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 7065 | return ExitLimit(BECount, MaxBECount, false, |
| 7066 | {&EL0.Predicates, &EL1.Predicates}); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7067 | } |
| 7068 | } |
| 7069 | |
| 7070 | // With an icmp, it may be feasible to compute an exact backedge-taken count. |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 7071 | // Proceed to the next level to examine the icmp. |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7072 | if (ICmpInst *ExitCondICmp = dyn_cast<ICmpInst>(ExitCond)) { |
| 7073 | ExitLimit EL = |
| 7074 | computeExitLimitFromICmp(L, ExitCondICmp, TBB, FBB, ControlsExit); |
| 7075 | if (EL.hasFullInfo() || !AllowPredicates) |
| 7076 | return EL; |
| 7077 | |
| 7078 | // Try again, but use SCEV predicates this time. |
| 7079 | return computeExitLimitFromICmp(L, ExitCondICmp, TBB, FBB, ControlsExit, |
| 7080 | /*AllowPredicates=*/true); |
| 7081 | } |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7082 | |
Dan Gohman | 6b1e2a8 | 2010-02-19 18:12:07 +0000 | [diff] [blame] | 7083 | // Check for a constant condition. These are normally stripped out by |
| 7084 | // SimplifyCFG, but ScalarEvolution may be used by a pass which wishes to |
| 7085 | // preserve the CFG and is temporarily leaving constant conditions |
| 7086 | // in place. |
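// For example, such a pass may leave `br i1 false, label %body, label %exit`
// in place; the two cases below treat that either as a backedge-taken count
// of zero or, for the inverse constant, as not computable (the backedge is
// always taken).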
| 7087 | if (ConstantInt *CI = dyn_cast<ConstantInt>(ExitCond)) { |
| 7088 | if (L->contains(FBB) == !CI->getZExtValue()) |
| 7089 | // The backedge is always taken. |
| 7090 | return getCouldNotCompute(); |
| 7091 | else |
| 7092 | // The backedge is never taken. |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 7093 | return getZero(CI->getType()); |
Dan Gohman | 6b1e2a8 | 2010-02-19 18:12:07 +0000 | [diff] [blame] | 7094 | } |
| 7095 | |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 7096 | // If it's not an integer or pointer comparison then compute it the hard way. |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 7097 | return computeExitCountExhaustively(L, ExitCond, !L->contains(TBB)); |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7098 | } |
| 7099 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7100 | ScalarEvolution::ExitLimit |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 7101 | ScalarEvolution::computeExitLimitFromICmp(const Loop *L, |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7102 | ICmpInst *ExitCond, |
| 7103 | BasicBlock *TBB, |
Andrew Trick | 5b245a1 | 2013-05-31 06:43:25 +0000 | [diff] [blame] | 7104 | BasicBlock *FBB, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7105 | bool ControlsExit, |
| 7106 | bool AllowPredicates) { |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7107 | // If the condition was exit on true, convert the condition to exit on false |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7108 | ICmpInst::Predicate Pred; |
Dan Gohman | 96212b6 | 2009-06-22 00:31:57 +0000 | [diff] [blame] | 7109 | if (!L->contains(FBB)) |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7110 | Pred = ExitCond->getPredicate(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7111 | else |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7112 | Pred = ExitCond->getInversePredicate(); |
| 7113 | const ICmpInst::Predicate OriginalPred = Pred; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7114 | |
| 7115 | // Handle common loops like: for (X = "string"; *X; ++X) |
| 7116 | if (LoadInst *LI = dyn_cast<LoadInst>(ExitCond->getOperand(0))) |
| 7117 | if (Constant *RHS = dyn_cast<Constant>(ExitCond->getOperand(1))) { |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7118 | ExitLimit ItCnt = |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7119 | computeLoadConstantCompareExitLimit(LI, RHS, L, Pred); |
Dan Gohman | ba82034 | 2010-02-24 17:31:30 +0000 | [diff] [blame] | 7120 | if (ItCnt.hasAnyInfo()) |
| 7121 | return ItCnt; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7122 | } |
| 7123 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7124 | const SCEV *LHS = getSCEV(ExitCond->getOperand(0)); |
| 7125 | const SCEV *RHS = getSCEV(ExitCond->getOperand(1)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7126 | |
| 7127 | // Try to evaluate any dependencies out of the loop. |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 7128 | LHS = getSCEVAtScope(LHS, L); |
| 7129 | RHS = getSCEVAtScope(RHS, L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7130 | |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7131 | // At this point, we would like to compute how many iterations of the |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7132 | // loop the predicate will return true for these inputs. |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 7133 | if (isLoopInvariant(LHS, L) && !isLoopInvariant(RHS, L)) { |
Dan Gohman | dc5f5cb | 2008-09-16 18:52:57 +0000 | [diff] [blame] | 7134 | // If there is a loop-invariant, force it into the RHS. |
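// For example, `while (n > i)` with loop-invariant `n` is analyzed below as
// `while (i < n)`, so the evolving AddRec ends up on the left-hand side.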
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7135 | std::swap(LHS, RHS); |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7136 | Pred = ICmpInst::getSwappedPredicate(Pred); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7137 | } |
| 7138 | |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 7139 | // Simplify the operands before analyzing them. |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7140 | (void)SimplifyICmpOperands(Pred, LHS, RHS); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 7141 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7142 | // If we have a comparison of a chrec against a constant, try to use value |
| 7143 | // ranges to answer this query. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7144 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) |
| 7145 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(LHS)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7146 | if (AddRec->getLoop() == L) { |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 7147 | // Form the constant range. |
Sanjoy Das | 1f7b813 | 2016-10-02 00:09:57 +0000 | [diff] [blame] | 7148 | ConstantRange CompRange = |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7149 | ConstantRange::makeExactICmpRegion(Pred, RHSC->getAPInt()); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7150 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7151 | const SCEV *Ret = AddRec->getNumIterationsInRange(CompRange, *this); |
Eli Friedman | ebf98b0 | 2009-05-09 12:32:42 +0000 | [diff] [blame] | 7152 | if (!isa<SCEVCouldNotCompute>(Ret)) return Ret; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7153 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7154 | |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7155 | switch (Pred) { |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7156 | case ICmpInst::ICMP_NE: { // while (X != Y) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7157 | // Convert to: while (X-Y != 0) |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 7158 | ExitLimit EL = howFarToZero(getMinusSCEV(LHS, RHS), L, ControlsExit, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7159 | AllowPredicates); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7160 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7161 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7162 | } |
Dan Gohman | 8a8ad7d | 2009-08-20 16:42:55 +0000 | [diff] [blame] | 7163 | case ICmpInst::ICMP_EQ: { // while (X == Y) |
| 7164 | // Convert to: while (X-Y == 0) |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 7165 | ExitLimit EL = howFarToNonZero(getMinusSCEV(LHS, RHS), L); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7166 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7167 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7168 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7169 | case ICmpInst::ICMP_SLT: |
| 7170 | case ICmpInst::ICMP_ULT: { // while (X < Y) |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7171 | bool IsSigned = Pred == ICmpInst::ICMP_SLT; |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 7172 | ExitLimit EL = howManyLessThans(LHS, RHS, L, IsSigned, ControlsExit, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7173 | AllowPredicates); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7174 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7175 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7176 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 7177 | case ICmpInst::ICMP_SGT: |
| 7178 | case ICmpInst::ICMP_UGT: { // while (X > Y) |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7179 | bool IsSigned = Pred == ICmpInst::ICMP_SGT; |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7180 | ExitLimit EL = |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 7181 | howManyGreaterThans(LHS, RHS, L, IsSigned, ControlsExit, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 7182 | AllowPredicates); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7183 | if (EL.hasAnyInfo()) return EL; |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 7184 | break; |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7185 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7186 | default: |
Chris Lattner | 0defaa1 | 2004-04-03 00:43:03 +0000 | [diff] [blame] | 7187 | break; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7188 | } |
Sanjoy Das | 0da2d14 | 2016-06-30 02:47:28 +0000 | [diff] [blame] | 7189 | |
| 7190 | auto *ExhaustiveCount = |
| 7191 | computeExitCountExhaustively(L, ExitCond, !L->contains(TBB)); |
| 7192 | |
| 7193 | if (!isa<SCEVCouldNotCompute>(ExhaustiveCount)) |
| 7194 | return ExhaustiveCount; |
| 7195 | |
| 7196 | return computeShiftCompareExitLimit(ExitCond->getOperand(0), |
Max Kazantsev | 63a3de0 | 2017-12-08 12:54:32 +0000 | [diff] [blame] | 7197 | ExitCond->getOperand(1), L, OriginalPred); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7198 | } |
| 7199 | |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 7200 | ScalarEvolution::ExitLimit |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 7201 | ScalarEvolution::computeExitLimitFromSingleExitSwitch(const Loop *L, |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 7202 | SwitchInst *Switch, |
| 7203 | BasicBlock *ExitingBlock, |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 7204 | bool ControlsExit) { |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 7205 | assert(!L->contains(ExitingBlock) && "Not an exiting block!"); |
| 7206 | |
| 7207 | // Give up if the exit is the default dest of a switch. |
| 7208 | if (Switch->getDefaultDest() == ExitingBlock) |
| 7209 | return getCouldNotCompute(); |
| 7210 | |
| 7211 | assert(L->contains(Switch->getDefaultDest()) && |
| 7212 | "Default case must not exit the loop!"); |
| 7213 | const SCEV *LHS = getSCEVAtScope(Switch->getCondition(), L); |
| 7214 | const SCEV *RHS = getConstant(Switch->findCaseDest(ExitingBlock)); |
| 7215 | |
| 7216 | // while (X != Y) --> while (X-Y != 0) |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 7217 | ExitLimit EL = howFarToZero(getMinusSCEV(LHS, RHS), L, ControlsExit); |
Benjamin Kramer | 5a18854 | 2014-02-11 15:44:32 +0000 | [diff] [blame] | 7218 | if (EL.hasAnyInfo()) |
| 7219 | return EL; |
| 7220 | |
| 7221 | return getCouldNotCompute(); |
| 7222 | } |
| 7223 | |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7224 | static ConstantInt * |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 7225 | EvaluateConstantChrecAtConstant(const SCEVAddRecExpr *AddRec, ConstantInt *C, |
| 7226 | ScalarEvolution &SE) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7227 | const SCEV *InVal = SE.getConstant(C); |
| 7228 | const SCEV *Val = AddRec->evaluateAtIteration(InVal, SE); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7229 | assert(isa<SCEVConstant>(Val) && |
| 7230 | "Evaluation of SCEV at constant didn't fold correctly?"); |
| 7231 | return cast<SCEVConstant>(Val)->getValue(); |
| 7232 | } |
| 7233 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 7234 | /// Given an exit condition of 'icmp op load X, cst', try to see if we can |
| 7235 | /// compute the backedge execution count. |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7236 | ScalarEvolution::ExitLimit |
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 7237 | ScalarEvolution::computeLoadConstantCompareExitLimit( |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 7238 | LoadInst *LI, |
| 7239 | Constant *RHS, |
| 7240 | const Loop *L, |
| 7241 | ICmpInst::Predicate predicate) { |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7242 | if (LI->isVolatile()) return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7243 | |
| 7244 | // Check to see if the loaded pointer is a getelementptr of a global. |
Dan Gohman | ba82034 | 2010-02-24 17:31:30 +0000 | [diff] [blame] | 7245 | // TODO: Use SCEV instead of manually grubbing with GEPs. |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7246 | GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(LI->getOperand(0)); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7247 | if (!GEP) return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7248 | |
| 7249 | // Make sure that it is really a constant global we are gepping, with an |
| 7250 | // initializer, and make sure the first IDX is really 0. |
| 7251 | GlobalVariable *GV = dyn_cast<GlobalVariable>(GEP->getOperand(0)); |
Dan Gohman | 5d5bc6d | 2009-08-19 18:20:44 +0000 | [diff] [blame] | 7252 | if (!GV || !GV->isConstant() || !GV->hasDefinitiveInitializer() || |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7253 | GEP->getNumOperands() < 3 || !isa<Constant>(GEP->getOperand(1)) || |
| 7254 | !cast<Constant>(GEP->getOperand(1))->isNullValue()) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7255 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7256 | |
| 7257 | // Okay, we allow one non-constant index into the GEP instruction. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7258 | Value *VarIdx = nullptr; |
Chris Lattner | e166a85 | 2012-01-24 05:49:24 +0000 | [diff] [blame] | 7259 | std::vector<Constant*> Indexes; |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7260 | unsigned VarIdxNum = 0; |
| 7261 | for (unsigned i = 2, e = GEP->getNumOperands(); i != e; ++i) |
| 7262 | if (ConstantInt *CI = dyn_cast<ConstantInt>(GEP->getOperand(i))) { |
| 7263 | Indexes.push_back(CI); |
| 7264 | } else if (!isa<ConstantInt>(GEP->getOperand(i))) { |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7265 | if (VarIdx) return getCouldNotCompute(); // Multiple non-constant idx's. |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7266 | VarIdx = GEP->getOperand(i); |
| 7267 | VarIdxNum = i-2; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7268 | Indexes.push_back(nullptr); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7269 | } |
| 7270 | |
Andrew Trick | 7004e4b | 2012-03-26 22:33:59 +0000 | [diff] [blame] | 7271 | // Loop-invariant loads may be a byproduct of loop optimization. Skip them. |
| 7272 | if (!VarIdx) |
| 7273 | return getCouldNotCompute(); |
| 7274 | |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7275 | // Okay, we know we have a (load (gep GV, 0, X)) comparison with a constant. |
| 7276 | // Check to see if X is a loop variant variable value now. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7277 | const SCEV *Idx = getSCEV(VarIdx); |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 7278 | Idx = getSCEVAtScope(Idx, L); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7279 | |
| 7280 | // We can only recognize very limited forms of loop index expressions, in |
| 7281 | // particular, only affine AddRec's like {C1,+,C2}. |
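// For example, an index that starts at 0 and is incremented by 1 every
// iteration is the affine AddRec {0,+,1}. Any constant start/step pair is
// fine; anything else is rejected below.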
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 7282 | const SCEVAddRecExpr *IdxExpr = dyn_cast<SCEVAddRecExpr>(Idx); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 7283 | if (!IdxExpr || !IdxExpr->isAffine() || isLoopInvariant(IdxExpr, L) || |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7284 | !isa<SCEVConstant>(IdxExpr->getOperand(0)) || |
| 7285 | !isa<SCEVConstant>(IdxExpr->getOperand(1))) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7286 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7287 | |
| 7288 | unsigned MaxSteps = MaxBruteForceIterations; |
| 7289 | for (unsigned IterationNum = 0; IterationNum != MaxSteps; ++IterationNum) { |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 7290 | ConstantInt *ItCst = ConstantInt::get( |
Owen Anderson | b6b2530 | 2009-07-14 23:09:55 +0000 | [diff] [blame] | 7291 | cast<IntegerType>(IdxExpr->getType()), IterationNum); |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 7292 | ConstantInt *Val = EvaluateConstantChrecAtConstant(IdxExpr, ItCst, *this); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7293 | |
| 7294 | // Form the GEP offset. |
| 7295 | Indexes[VarIdxNum] = Val; |
| 7296 | |
Chris Lattner | e166a85 | 2012-01-24 05:49:24 +0000 | [diff] [blame] | 7297 | Constant *Result = ConstantFoldLoadThroughGEPIndices(GV->getInitializer(), |
| 7298 | Indexes); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7299 | if (!Result) break; // Cannot compute! |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7300 | |
| 7301 | // Evaluate the condition for this iteration. |
Reid Spencer | 266e42b | 2006-12-23 06:05:41 +0000 | [diff] [blame] | 7302 | Result = ConstantExpr::getICmp(predicate, Result, RHS); |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 7303 | if (!isa<ConstantInt>(Result)) break; // Couldn't decide for sure |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 7304 | if (cast<ConstantInt>(Result)->getValue().isMinValue()) { |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7305 | ++NumArrayLenItCounts; |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 7306 | return getConstant(ItCst); // Found terminating iteration! |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7307 | } |
| 7308 | } |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7309 | return getCouldNotCompute(); |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7310 | } |
| 7311 | |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7312 | ScalarEvolution::ExitLimit ScalarEvolution::computeShiftCompareExitLimit( |
| 7313 | Value *LHS, Value *RHSV, const Loop *L, ICmpInst::Predicate Pred) { |
| 7314 | ConstantInt *RHS = dyn_cast<ConstantInt>(RHSV); |
| 7315 | if (!RHS) |
| 7316 | return getCouldNotCompute(); |
| 7317 | |
| 7318 | const BasicBlock *Latch = L->getLoopLatch(); |
| 7319 | if (!Latch) |
| 7320 | return getCouldNotCompute(); |
| 7321 | |
| 7322 | const BasicBlock *Predecessor = L->getLoopPredecessor(); |
| 7323 | if (!Predecessor) |
| 7324 | return getCouldNotCompute(); |
| 7325 | |
| 7326 | // Return true if V is of the form "LHS `shift_op` <positive constant>". |
| 7327 | // Return LHS in OutLHS and shift_op in OutOpCode.
| 7328 | auto MatchPositiveShift = |
| 7329 | [](Value *V, Value *&OutLHS, Instruction::BinaryOps &OutOpCode) { |
| 7330 | |
| 7331 | using namespace PatternMatch; |
| 7332 | |
| 7333 | ConstantInt *ShiftAmt; |
| 7334 | if (match(V, m_LShr(m_Value(OutLHS), m_ConstantInt(ShiftAmt)))) |
| 7335 | OutOpCode = Instruction::LShr; |
| 7336 | else if (match(V, m_AShr(m_Value(OutLHS), m_ConstantInt(ShiftAmt)))) |
| 7337 | OutOpCode = Instruction::AShr; |
| 7338 | else if (match(V, m_Shl(m_Value(OutLHS), m_ConstantInt(ShiftAmt)))) |
| 7339 | OutOpCode = Instruction::Shl; |
| 7340 | else |
| 7341 | return false; |
| 7342 | |
| 7343 | return ShiftAmt->getValue().isStrictlyPositive(); |
| 7344 | }; |
| 7345 | |
| 7346 | // Recognize a "shift recurrence" either of the form %iv or of %iv.shifted in |
| 7347 | // |
| 7348 | // loop: |
| 7349 | // %iv = phi i32 [ %iv.shifted, %loop ], [ %val, %preheader ] |
| 7350 | // %iv.shifted = lshr i32 %iv, <positive constant> |
| 7351 | // |
Simon Pilgrim | f2fbf43 | 2016-11-20 13:47:59 +0000 | [diff] [blame] | 7352 | // Return true on a successful match. Return the corresponding PHI node (%iv |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7353 | // above) in PNOut and the opcode of the shift operation in OpCodeOut. |
| 7354 | auto MatchShiftRecurrence = |
| 7355 | [&](Value *V, PHINode *&PNOut, Instruction::BinaryOps &OpCodeOut) { |
| 7356 | Optional<Instruction::BinaryOps> PostShiftOpCode; |
| 7357 | |
| 7358 | { |
| 7359 | Instruction::BinaryOps OpC; |
| 7360 | Value *V; |
| 7361 | |
| 7362 | // If we encounter a shift instruction, "peel off" the shift operation, |
| 7363 | // and remember that we did so. Later when we inspect %iv's backedge |
| 7364 | // value, we will make sure that the backedge value uses the same |
| 7365 | // operation. |
| 7366 | // |
| 7367 | // Note: the peeled shift operation does not have to be the same |
| 7368 | // instruction as the one feeding into the PHI's backedge value. We only |
| 7369 | // really care about it being the same *kind* of shift instruction -- |
| 7370 | // that's all that is required for our later inferences to hold. |
| 7371 | if (MatchPositiveShift(LHS, V, OpC)) { |
| 7372 | PostShiftOpCode = OpC; |
| 7373 | LHS = V; |
| 7374 | } |
| 7375 | } |
| 7376 | |
| 7377 | PNOut = dyn_cast<PHINode>(LHS); |
| 7378 | if (!PNOut || PNOut->getParent() != L->getHeader()) |
| 7379 | return false; |
| 7380 | |
| 7381 | Value *BEValue = PNOut->getIncomingValueForBlock(Latch); |
| 7382 | Value *OpLHS; |
| 7383 | |
| 7384 | return |
| 7385 | // The backedge value for the PHI node must be a shift by a positive |
| 7386 | // amount |
| 7387 | MatchPositiveShift(BEValue, OpLHS, OpCodeOut) && |
| 7388 | |
| 7389 | // of the PHI node itself |
| 7390 | OpLHS == PNOut && |
| 7391 | |
| 7392 | // and the kind of shift should match the kind of shift we peeled |
| 7393 | // off, if any. |
| 7394 | (!PostShiftOpCode.hasValue() || *PostShiftOpCode == OpCodeOut); |
| 7395 | }; |
| 7396 | |
| 7397 | PHINode *PN; |
| 7398 | Instruction::BinaryOps OpCode; |
| 7399 | if (!MatchShiftRecurrence(LHS, PN, OpCode)) |
| 7400 | return getCouldNotCompute(); |
| 7401 | |
| 7402 | const DataLayout &DL = getDataLayout(); |
| 7403 | |
| 7404 | // The key rationale for this optimization is that for some kinds of shift |
| 7405 | // recurrences, the value of the recurrence "stabilizes" to either 0 or -1 |
| 7406 | // within a finite number of iterations. If the condition guarding the |
| 7407 | // backedge (in the sense that the backedge is taken if the condition is true) |
| 7408 | // is false for the value the shift recurrence stabilizes to, then we know |
| 7409 | // that the backedge is taken only a finite number of times. |
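| | // |
| | // For example, the recurrence {20,lshr,1} takes the values 20, 10, 5, 2, 1, |
| | // 0, 0, ... and {-20,ashr,1} takes the values -20, -10, -5, -3, -2, -1, -1, |
| | // ... so both stabilize (to 0 and -1 respectively) within bitwidth-many |
| | // iterations. |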
| 7410 | |
| 7411 | ConstantInt *StableValue = nullptr; |
| 7412 | switch (OpCode) { |
| 7413 | default: |
| 7414 | llvm_unreachable("Impossible case!"); |
| 7415 | |
| 7416 | case Instruction::AShr: { |
| 7417 | // {K,ashr,<positive-constant>} stabilizes to 0 if K is non-negative and |
| 7418 | // to -1 if K is negative, in at most bitwidth(K) iterations. |
| 7419 | Value *FirstValue = PN->getIncomingValueForBlock(Predecessor); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 7420 | KnownBits Known = computeKnownBits(FirstValue, DL, 0, nullptr, |
| 7421 | Predecessor->getTerminator(), &DT); |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7422 | auto *Ty = cast<IntegerType>(RHS->getType()); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 7423 | if (Known.isNonNegative()) |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7424 | StableValue = ConstantInt::get(Ty, 0); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 7425 | else if (Known.isNegative()) |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7426 | StableValue = ConstantInt::get(Ty, -1, true); |
| 7427 | else |
| 7428 | return getCouldNotCompute(); |
| 7429 | |
| 7430 | break; |
| 7431 | } |
| 7432 | case Instruction::LShr: |
| 7433 | case Instruction::Shl: |
| 7434 | // Both {K,lshr,<positive-constant>} and {K,shl,<positive-constant>} |
| 7435 | // stabilize to 0 in at most bitwidth(K) iterations. |
| 7436 | StableValue = ConstantInt::get(cast<IntegerType>(RHS->getType()), 0); |
| 7437 | break; |
| 7438 | } |
| 7439 | |
| 7440 | auto *Result = |
| 7441 | ConstantFoldCompareInstOperands(Pred, StableValue, RHS, DL, &TLI); |
| 7442 | assert(Result->getType()->isIntegerTy(1) && |
| 7443 | "Otherwise cannot be an operand to a branch instruction"); |
| 7444 | |
| 7445 | if (Result->isZeroValue()) { |
| 7446 | unsigned BitWidth = getTypeSizeInBits(RHS->getType()); |
| 7447 | const SCEV *UpperBound = |
| 7448 | getConstant(getEffectiveSCEVType(RHS->getType()), BitWidth); |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 7449 | return ExitLimit(getCouldNotCompute(), UpperBound, false); |
Sanjoy Das | c88f5d3 | 2015-10-28 21:27:14 +0000 | [diff] [blame] | 7450 | } |
| 7451 | |
| 7452 | return getCouldNotCompute(); |
| 7453 | } |
Chris Lattner | ec901cc | 2004-10-12 01:49:27 +0000 | [diff] [blame] | 7454 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 7455 | /// Return true if we can constant fold an instruction of the specified type, |
| 7456 | /// assuming that all operands were constants. |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7457 | static bool CanConstantFold(const Instruction *I) { |
Reid Spencer | 2341c22 | 2007-02-02 02:16:23 +0000 | [diff] [blame] | 7458 | if (isa<BinaryOperator>(I) || isa<CmpInst>(I) || |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7459 | isa<SelectInst>(I) || isa<CastInst>(I) || isa<GetElementPtrInst>(I) || |
| 7460 | isa<LoadInst>(I)) |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7461 | return true; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7462 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7463 | if (const CallInst *CI = dyn_cast<CallInst>(I)) |
| 7464 | if (const Function *F = CI->getCalledFunction()) |
Andrew Kaylor | 647025f | 2017-06-09 23:18:11 +0000 | [diff] [blame] | 7465 | return canConstantFoldCallTo(CI, F); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7466 | return false; |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7467 | } |
| 7468 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7469 | /// Determine whether this instruction can constant evolve within this loop |
| 7470 | /// assuming its operands can all constant evolve. |
| 7471 | static bool canConstantEvolve(Instruction *I, const Loop *L) { |
| 7472 | // An instruction outside of the loop can't be derived from a loop PHI. |
| 7473 | if (!L->contains(I)) return false; |
| 7474 | |
| 7475 | if (isa<PHINode>(I)) { |
David Blaikie | 19ef0d3 | 2015-03-24 16:33:19 +0000 | [diff] [blame] | 7476 | // We don't currently keep track of the control flow needed to evaluate |
| 7477 | // PHIs, so we cannot handle PHIs inside of loops. |
| 7478 | return L->getHeader() == I->getParent(); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7479 | } |
| 7480 | |
| 7481 | // If we won't be able to constant fold this expression even if the operands |
| 7482 | // are constants, bail early. |
| 7483 | return CanConstantFold(I); |
| 7484 | } |
| 7485 | |
| 7486 | /// getConstantEvolvingPHIOperands - Implement getConstantEvolvingPHI by |
| 7487 | /// recursing through each instruction operand until reaching a loop header phi. |
| 7488 | static PHINode * |
| 7489 | getConstantEvolvingPHIOperands(Instruction *UseInst, const Loop *L, |
Michael Liao | 468fb74 | 2017-01-13 18:28:30 +0000 | [diff] [blame] | 7490 | DenseMap<Instruction *, PHINode *> &PHIMap, |
| 7491 | unsigned Depth) { |
| 7492 | if (Depth > MaxConstantEvolvingDepth) |
| 7493 | return nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7494 | |
| 7495 | // Otherwise, we can evaluate this instruction if all of its operands are |
| 7496 | // constant or derived from a PHI node themselves. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7497 | PHINode *PHI = nullptr; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 7498 | for (Value *Op : UseInst->operands()) { |
| 7499 | if (isa<Constant>(Op)) continue; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7500 | |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 7501 | Instruction *OpInst = dyn_cast<Instruction>(Op); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7502 | if (!OpInst || !canConstantEvolve(OpInst, L)) return nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7503 | |
| 7504 | PHINode *P = dyn_cast<PHINode>(OpInst); |
Andrew Trick | 3e8a576 | 2011-10-05 22:06:53 +0000 | [diff] [blame] | 7505 | if (!P) |
| 7506 | // If this operand is already visited, reuse the prior result. |
| 7507 | // We may have P != PHI if this is the deepest point at which the |
| 7508 | // inconsistent paths meet. |
| 7509 | P = PHIMap.lookup(OpInst); |
| 7510 | if (!P) { |
| 7511 | // Recurse and memoize the results, whether a phi is found or not. |
| 7512 | // This recursive call invalidates pointers into PHIMap. |
Michael Liao | 468fb74 | 2017-01-13 18:28:30 +0000 | [diff] [blame] | 7513 | P = getConstantEvolvingPHIOperands(OpInst, L, PHIMap, Depth + 1); |
Andrew Trick | 3e8a576 | 2011-10-05 22:06:53 +0000 | [diff] [blame] | 7514 | PHIMap[OpInst] = P; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7515 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7516 | if (!P) |
| 7517 | return nullptr; // Not evolving from PHI |
| 7518 | if (PHI && PHI != P) |
| 7519 | return nullptr; // Evolving from multiple different PHIs. |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7520 | PHI = P; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7521 | } |
| 7522 | // This is an expression evolving from a constant PHI! |
| 7523 | return PHI; |
| 7524 | } |
| 7525 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7526 | /// getConstantEvolvingPHI - Given an LLVM value and a loop, return a PHI node |
| 7527 | /// in the loop that V is derived from. We allow arbitrary operations along the |
| 7528 | /// way, but the operands of an operation must either be constants or a value |
| 7529 | /// derived from a constant PHI. If this expression does not fit with these |
| 7530 | /// constraints, return null. |
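| | /// |
| | /// For example, in an illustrative loop body such as |
| | ///   %accum = phi i32 [ 0, %preheader ], [ %accum.next, %loop ] |
| | ///   %masked = and i32 %accum, 255 |
| | ///   %accum.next = add i32 %masked, 7 |
| | /// every operand feeding %accum.next is either a constant or derived from |
| | /// %accum, so getConstantEvolvingPHI(%accum.next, L) returns %accum. |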
| 7531 | static PHINode *getConstantEvolvingPHI(Value *V, const Loop *L) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7532 | Instruction *I = dyn_cast<Instruction>(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7533 | if (!I || !canConstantEvolve(I, L)) return nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7534 | |
Sanjoy Das | d295f2c | 2015-10-18 00:29:27 +0000 | [diff] [blame] | 7535 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7536 | return PN; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7537 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7538 | // Record non-constant instructions contained by the loop. |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7539 | DenseMap<Instruction *, PHINode *> PHIMap; |
Michael Liao | 468fb74 | 2017-01-13 18:28:30 +0000 | [diff] [blame] | 7540 | return getConstantEvolvingPHIOperands(I, L, PHIMap, 0); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7541 | } |
| 7542 | |
| 7543 | /// EvaluateExpression - Given an expression that passes the |
| 7544 | /// getConstantEvolvingPHI predicate, evaluate its value assuming the PHI |
| 7545 | /// nodes in the loop have the constant values given in Vals. If we can't |
| 7546 | /// fold this expression for some reason, return null. |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7547 | static Constant *EvaluateExpression(Value *V, const Loop *L, |
| 7548 | DenseMap<Instruction *, Constant *> &Vals, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 7549 | const DataLayout &DL, |
Chad Rosier | e6de63d | 2011-12-01 21:29:16 +0000 | [diff] [blame] | 7550 | const TargetLibraryInfo *TLI) { |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7551 | // Convenient constant check, but redundant for recursive calls. |
Reid Spencer | 30d69a5 | 2004-07-18 00:18:30 +0000 | [diff] [blame] | 7552 | if (Constant *C = dyn_cast<Constant>(V)) return C; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7553 | Instruction *I = dyn_cast<Instruction>(V); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7554 | if (!I) return nullptr; |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7555 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7556 | if (Constant *C = Vals.lookup(I)) return C; |
| 7557 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7558 | // An instruction inside the loop depends on a value outside the loop that we |
| 7559 | // weren't given a mapping for, or a value such as a call inside the loop. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7560 | if (!canConstantEvolve(I, L)) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7561 | |
| 7562 | // An unmapped PHI can be due to a branch or another loop inside this loop, |
| 7563 | // or due to this not being the initial iteration through a loop where we |
| 7564 | // couldn't compute the evolution of this particular PHI last time. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7565 | if (isa<PHINode>(I)) return nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7566 | |
Dan Gohman | f820bd3 | 2010-06-22 13:15:46 +0000 | [diff] [blame] | 7567 | std::vector<Constant*> Operands(I->getNumOperands()); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7568 | |
| 7569 | for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) { |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7570 | Instruction *Operand = dyn_cast<Instruction>(I->getOperand(i)); |
| 7571 | if (!Operand) { |
Nick Lewycky | a447e0f3 | 2011-10-14 09:38:46 +0000 | [diff] [blame] | 7572 | Operands[i] = dyn_cast<Constant>(I->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7573 | if (!Operands[i]) return nullptr; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7574 | continue; |
| 7575 | } |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 7576 | Constant *C = EvaluateExpression(Operand, L, Vals, DL, TLI); |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7577 | Vals[Operand] = C; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7578 | if (!C) return nullptr; |
Andrew Trick | e9162f1 | 2011-10-05 05:58:49 +0000 | [diff] [blame] | 7579 | Operands[i] = C; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7580 | } |
| 7581 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7582 | if (CmpInst *CI = dyn_cast<CmpInst>(I)) |
Chris Lattner | cdfb80d | 2009-11-09 23:06:58 +0000 | [diff] [blame] | 7583 | return ConstantFoldCompareInstOperands(CI->getPredicate(), Operands[0], |
Rafael Espindola | 7c68beb | 2014-02-18 15:33:12 +0000 | [diff] [blame] | 7584 | Operands[1], DL, TLI); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7585 | if (LoadInst *LI = dyn_cast<LoadInst>(I)) { |
| 7586 | if (!LI->isVolatile()) |
Eduard Burtescu | 1423921 | 2016-01-22 01:17:26 +0000 | [diff] [blame] | 7587 | return ConstantFoldLoadFromConstPtr(Operands[0], LI->getType(), DL); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7588 | } |
Manuel Jacob | e902459 | 2016-01-21 06:33:22 +0000 | [diff] [blame] | 7589 | return ConstantFoldInstOperands(I, Operands, DL, TLI); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7590 | } |
| 7591 | |
Sanjoy Das | 52bfa0f | 2015-11-02 02:06:01 +0000 | [diff] [blame] | 7592 | |
| 7593 | // If every incoming value to PN except the one for BB is a specific Constant, |
| 7594 | // return that, else return nullptr. |
| 7595 | static Constant *getOtherIncomingValue(PHINode *PN, BasicBlock *BB) { |
| 7596 | Constant *IncomingVal = nullptr; |
| 7597 | |
| 7598 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { |
| 7599 | if (PN->getIncomingBlock(i) == BB) |
| 7600 | continue; |
| 7601 | |
| 7602 | auto *CurrentVal = dyn_cast<Constant>(PN->getIncomingValue(i)); |
| 7603 | if (!CurrentVal) |
| 7604 | return nullptr; |
| 7605 | |
| 7606 | if (IncomingVal != CurrentVal) { |
| 7607 | if (IncomingVal) |
| 7608 | return nullptr; |
| 7609 | IncomingVal = CurrentVal; |
| 7610 | } |
| 7611 | } |
| 7612 | |
| 7613 | return IncomingVal; |
| 7614 | } |
| 7615 | |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7616 | /// getConstantEvolutionLoopExitValue - If we know that the specified Phi is |
| 7617 | /// in the header of its containing loop, that the loop executes a |
| 7618 | /// constant number of times, and that the PHI node is just a recurrence |
| 7619 | /// involving constants, fold it. |
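| | /// |
| | /// For example (illustrative), with a backedge-taken count of 2 and |
| | ///   %i = phi i32 [ 1, %preheader ], [ %i.next, %loop ] |
| | ///   %i.next = mul i32 %i, 3 |
| | /// the PHI takes the values 1, 3, 9, so the computed exit value is 9. |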
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7620 | Constant * |
| 7621 | ScalarEvolution::getConstantEvolutionLoopExitValue(PHINode *PN, |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 7622 | const APInt &BEs, |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7623 | const Loop *L) { |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7624 | auto I = ConstantEvolutionLoopExitValue.find(PN); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7625 | if (I != ConstantEvolutionLoopExitValue.end()) |
| 7626 | return I->second; |
| 7627 | |
Dan Gohman | 4ce1fb1 | 2010-04-08 23:03:40 +0000 | [diff] [blame] | 7628 | if (BEs.ugt(MaxBruteForceIterations)) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7629 | return ConstantEvolutionLoopExitValue[PN] = nullptr; // Not going to evaluate it. |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7630 | |
| 7631 | Constant *&RetVal = ConstantEvolutionLoopExitValue[PN]; |
| 7632 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7633 | DenseMap<Instruction *, Constant *> CurrentIterVals; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7634 | BasicBlock *Header = L->getHeader(); |
| 7635 | assert(PN->getParent() == Header && "Can't evaluate PHI not in loop header!"); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7636 | |
Sanjoy Das | dd70996 | 2015-10-08 18:28:36 +0000 | [diff] [blame] | 7637 | BasicBlock *Latch = L->getLoopLatch(); |
| 7638 | if (!Latch) |
| 7639 | return nullptr; |
| 7640 | |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7641 | for (auto &I : *Header) { |
| 7642 | PHINode *PHI = dyn_cast<PHINode>(&I); |
| 7643 | if (!PHI) break; |
Sanjoy Das | 52bfa0f | 2015-11-02 02:06:01 +0000 | [diff] [blame] | 7644 | auto *StartCST = getOtherIncomingValue(PHI, Latch); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7645 | if (!StartCST) continue; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7646 | CurrentIterVals[PHI] = StartCST; |
| 7647 | } |
| 7648 | if (!CurrentIterVals.count(PN)) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7649 | return RetVal = nullptr; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7650 | |
Sanjoy Das | dd70996 | 2015-10-08 18:28:36 +0000 | [diff] [blame] | 7651 | Value *BEValue = PN->getIncomingValueForBlock(Latch); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7652 | |
| 7653 | // Execute the loop symbolically to determine the exit value. |
Sanjoy Das | b5a968f | 2017-07-29 05:32:47 +0000 | [diff] [blame] | 7654 | assert(BEs.getActiveBits() < CHAR_BIT * sizeof(unsigned) && |
| 7655 | "BEs is <= MaxBruteForceIterations which is an 'unsigned'!"); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7656 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 7657 | unsigned NumIterations = BEs.getZExtValue(); // must be in range |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 7658 | unsigned IterationNum = 0; |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 7659 | const DataLayout &DL = getDataLayout(); |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7660 | for (; ; ++IterationNum) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7661 | if (IterationNum == NumIterations) |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7662 | return RetVal = CurrentIterVals[PN]; // Got exit value! |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7663 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7664 | // Compute the value of the PHIs for the next iteration. |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7665 | // EvaluateExpression adds non-phi values to the CurrentIterVals map. |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7666 | DenseMap<Instruction *, Constant *> NextIterVals; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 7667 | Constant *NextPHI = |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7668 | EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7669 | if (!NextPHI) |
| 7670 | return nullptr; // Couldn't evaluate! |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7671 | NextIterVals[PN] = NextPHI; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7672 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7673 | bool StoppedEvolving = NextPHI == CurrentIterVals[PN]; |
| 7674 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7675 | // Also evaluate the other PHI nodes. However, we don't get to stop if we |
| 7676 | // cease to be able to evaluate one of them or if they stop evolving, |
| 7677 | // because that doesn't necessarily prevent us from computing PN. |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 7678 | SmallVector<std::pair<PHINode *, Constant *>, 8> PHIsToCompute; |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7679 | for (const auto &I : CurrentIterVals) { |
| 7680 | PHINode *PHI = dyn_cast<PHINode>(I.first); |
Nick Lewycky | 8e904de | 2011-10-24 05:51:01 +0000 | [diff] [blame] | 7681 | if (!PHI || PHI == PN || PHI->getParent() != Header) continue; |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7682 | PHIsToCompute.emplace_back(PHI, I.second); |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 7683 | } |
| 7684 | // We use two distinct loops because EvaluateExpression may invalidate any |
| 7685 | // iterators into CurrentIterVals. |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7686 | for (const auto &I : PHIsToCompute) { |
| 7687 | PHINode *PHI = I.first; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7688 | Constant *&NextPHI = NextIterVals[PHI]; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7689 | if (!NextPHI) { // Not already computed. |
Sanjoy Das | dd70996 | 2015-10-08 18:28:36 +0000 | [diff] [blame] | 7690 | Value *BEValue = PHI->getIncomingValueForBlock(Latch); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7691 | NextPHI = EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7692 | } |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7693 | if (NextPHI != I.second) |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7694 | StoppedEvolving = false; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7695 | } |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7696 | |
| 7697 | // If all entries in CurrentIterVals == NextIterVals then we can stop |
| 7698 | // iterating, the loop can't continue to change. |
| 7699 | if (StoppedEvolving) |
| 7700 | return RetVal = CurrentIterVals[PN]; |
| 7701 | |
Andrew Trick | 3a86ba7 | 2011-10-05 03:25:31 +0000 | [diff] [blame] | 7702 | CurrentIterVals.swap(NextIterVals); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7703 | } |
| 7704 | } |
| 7705 | |
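| | /// computeExitCountExhaustively - Evaluate the loop exit condition one |
| | /// iteration at a time (up to MaxBruteForceIterations) by constant folding |
| | /// the PHIs it depends on. Return the iteration number at which Cond first |
| | /// evaluates to ExitWhen, or SCEVCouldNotCompute if this cannot be |
| | /// determined. |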
Sanjoy Das | 413dbbb | 2015-10-08 18:46:59 +0000 | [diff] [blame] | 7706 | const SCEV *ScalarEvolution::computeExitCountExhaustively(const Loop *L, |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7707 | Value *Cond, |
| 7708 | bool ExitWhen) { |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7709 | PHINode *PN = getConstantEvolvingPHI(Cond, L); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7710 | if (!PN) return getCouldNotCompute(); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7711 | |
Dan Gohman | 866971e | 2010-06-19 14:17:24 +0000 | [diff] [blame] | 7712 | // If the loop is canonicalized, the PHI will have exactly two entries. |
| 7713 | // That's the only form we support here. |
| 7714 | if (PN->getNumIncomingValues() != 2) return getCouldNotCompute(); |
| 7715 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7716 | DenseMap<Instruction *, Constant *> CurrentIterVals; |
| 7717 | BasicBlock *Header = L->getHeader(); |
| 7718 | assert(PN->getParent() == Header && "Can't evaluate PHI not in loop header!"); |
| 7719 | |
Sanjoy Das | dd70996 | 2015-10-08 18:28:36 +0000 | [diff] [blame] | 7720 | BasicBlock *Latch = L->getLoopLatch(); |
| 7721 | assert(Latch && "Should follow from NumIncomingValues == 2!"); |
| 7722 | |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7723 | for (auto &I : *Header) { |
| 7724 | PHINode *PHI = dyn_cast<PHINode>(&I); |
| 7725 | if (!PHI) |
| 7726 | break; |
Sanjoy Das | 52bfa0f | 2015-11-02 02:06:01 +0000 | [diff] [blame] | 7727 | auto *StartCST = getOtherIncomingValue(PHI, Latch); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7728 | if (!StartCST) continue; |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7729 | CurrentIterVals[PHI] = StartCST; |
| 7730 | } |
| 7731 | if (!CurrentIterVals.count(PN)) |
| 7732 | return getCouldNotCompute(); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7733 | |
| 7734 | // Okay, we found a PHI node that defines the trip count of this loop. Execute |
| 7735 | // the loop symbolically to determine when the condition gets a value of |
| 7736 | // "ExitWhen". |
Andrew Trick | 90c7a10 | 2011-11-16 00:52:40 +0000 | [diff] [blame] | 7737 | unsigned MaxIterations = MaxBruteForceIterations; // Limit analysis. |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 7738 | const DataLayout &DL = getDataLayout(); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7739 | for (unsigned IterationNum = 0; IterationNum != MaxIterations;++IterationNum){ |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7740 | auto *CondVal = dyn_cast_or_null<ConstantInt>( |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7741 | EvaluateExpression(Cond, L, CurrentIterVals, DL, &TLI)); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7742 | |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 7743 | // Couldn't symbolically evaluate. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7744 | if (!CondVal) return getCouldNotCompute(); |
Zhou Sheng | 75b871f | 2007-01-11 12:24:14 +0000 | [diff] [blame] | 7745 | |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 7746 | if (CondVal->getValue() == uint64_t(ExitWhen)) { |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7747 | ++NumBruteForceTripCountsComputed; |
Owen Anderson | 55f1c09 | 2009-08-13 21:58:54 +0000 | [diff] [blame] | 7748 | return getConstant(Type::getInt32Ty(getContext()), IterationNum); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7749 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7750 | |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7751 | // Update all the PHI nodes for the next iteration. |
| 7752 | DenseMap<Instruction *, Constant *> NextIterVals; |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 7753 | |
| 7754 | // Create a list of which PHIs we need to compute. We want to do this before |
| 7755 | // calling EvaluateExpression on them because that may invalidate iterators |
| 7756 | // into CurrentIterVals. |
| 7757 | SmallVector<PHINode *, 8> PHIsToCompute; |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7758 | for (const auto &I : CurrentIterVals) { |
| 7759 | PHINode *PHI = dyn_cast<PHINode>(I.first); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7760 | if (!PHI || PHI->getParent() != Header) continue; |
Nick Lewycky | d48ab84 | 2011-11-12 03:09:12 +0000 | [diff] [blame] | 7761 | PHIsToCompute.push_back(PHI); |
| 7762 | } |
Sanjoy Das | 4493b40 | 2015-10-07 17:38:25 +0000 | [diff] [blame] | 7763 | for (PHINode *PHI : PHIsToCompute) { |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7764 | Constant *&NextPHI = NextIterVals[PHI]; |
| 7765 | if (NextPHI) continue; // Already computed! |
| 7766 | |
Sanjoy Das | dd70996 | 2015-10-08 18:28:36 +0000 | [diff] [blame] | 7767 | Value *BEValue = PHI->getIncomingValueForBlock(Latch); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7768 | NextPHI = EvaluateExpression(BEValue, L, CurrentIterVals, DL, &TLI); |
Duncan Sands | a370f3e | 2011-10-25 12:28:52 +0000 | [diff] [blame] | 7769 | } |
| 7770 | CurrentIterVals.swap(NextIterVals); |
Chris Lattner | 4021d1a | 2004-04-17 18:36:24 +0000 | [diff] [blame] | 7771 | } |
| 7772 | |
| 7773 | // Too many iterations were needed to evaluate. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 7774 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7775 | } |
| 7776 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7777 | const SCEV *ScalarEvolution::getSCEVAtScope(const SCEV *V, const Loop *L) { |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 7778 | SmallVector<std::pair<const Loop *, const SCEV *>, 2> &Values = |
| 7779 | ValuesAtScopes[V]; |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 7780 | // Check to see if we've folded this expression at this loop before. |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 7781 | for (auto &LS : Values) |
| 7782 | if (LS.first == L) |
| 7783 | return LS.second ? LS.second : V; |
| 7784 | |
| 7785 | Values.emplace_back(L, nullptr); |
| 7786 | |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 7787 | // Otherwise compute it. |
| 7788 | const SCEV *C = computeSCEVAtScope(V, L); |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 7789 | for (auto &LS : reverse(ValuesAtScopes[V])) |
| 7790 | if (LS.first == L) { |
| 7791 | LS.second = C; |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 7792 | break; |
| 7793 | } |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 7794 | return C; |
| 7795 | } |
| 7796 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7797 | /// This builds up a Constant using the ConstantExpr interface. That way, we |
| 7798 | /// will return Constants for objects which aren't represented by a |
| 7799 | /// SCEVConstant, because SCEVConstant is restricted to ConstantInt. |
| 7800 | /// Returns NULL if the SCEV isn't representable as a Constant. |
| 7801 | static Constant *BuildConstantFromSCEV(const SCEV *V) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 7802 | switch (static_cast<SCEVTypes>(V->getSCEVType())) { |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7803 | case scCouldNotCompute: |
| 7804 | case scAddRecExpr: |
| 7805 | break; |
| 7806 | case scConstant: |
| 7807 | return cast<SCEVConstant>(V)->getValue(); |
| 7808 | case scUnknown: |
| 7809 | return dyn_cast<Constant>(cast<SCEVUnknown>(V)->getValue()); |
| 7810 | case scSignExtend: { |
| 7811 | const SCEVSignExtendExpr *SS = cast<SCEVSignExtendExpr>(V); |
| 7812 | if (Constant *CastOp = BuildConstantFromSCEV(SS->getOperand())) |
| 7813 | return ConstantExpr::getSExt(CastOp, SS->getType()); |
| 7814 | break; |
| 7815 | } |
| 7816 | case scZeroExtend: { |
| 7817 | const SCEVZeroExtendExpr *SZ = cast<SCEVZeroExtendExpr>(V); |
| 7818 | if (Constant *CastOp = BuildConstantFromSCEV(SZ->getOperand())) |
| 7819 | return ConstantExpr::getZExt(CastOp, SZ->getType()); |
| 7820 | break; |
| 7821 | } |
| 7822 | case scTruncate: { |
| 7823 | const SCEVTruncateExpr *ST = cast<SCEVTruncateExpr>(V); |
| 7824 | if (Constant *CastOp = BuildConstantFromSCEV(ST->getOperand())) |
| 7825 | return ConstantExpr::getTrunc(CastOp, ST->getType()); |
| 7826 | break; |
| 7827 | } |
| 7828 | case scAddExpr: { |
| 7829 | const SCEVAddExpr *SA = cast<SCEVAddExpr>(V); |
| 7830 | if (Constant *C = BuildConstantFromSCEV(SA->getOperand(0))) { |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 7831 | if (PointerType *PTy = dyn_cast<PointerType>(C->getType())) { |
| 7832 | unsigned AS = PTy->getAddressSpace(); |
| 7833 | Type *DestPtrTy = Type::getInt8PtrTy(C->getContext(), AS); |
| 7834 | C = ConstantExpr::getBitCast(C, DestPtrTy); |
| 7835 | } |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7836 | for (unsigned i = 1, e = SA->getNumOperands(); i != e; ++i) { |
| 7837 | Constant *C2 = BuildConstantFromSCEV(SA->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7838 | if (!C2) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7839 | |
| 7840 | // First pointer! |
| 7841 | if (!C->getType()->isPointerTy() && C2->getType()->isPointerTy()) { |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 7842 | unsigned AS = C2->getType()->getPointerAddressSpace(); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7843 | std::swap(C, C2); |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 7844 | Type *DestPtrTy = Type::getInt8PtrTy(C->getContext(), AS); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7845 | // The offsets have been converted to bytes. We can add bytes to an |
| 7846 | // i8* by GEP with the byte count in the first index. |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 7847 | C = ConstantExpr::getBitCast(C, DestPtrTy); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7848 | } |
| 7849 | |
| 7850 | // Don't bother trying to sum two pointers. We probably can't |
| 7851 | // statically compute a load that results from it anyway. |
| 7852 | if (C2->getType()->isPointerTy()) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7853 | return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7854 | |
Matt Arsenault | be18b8a | 2013-10-21 18:41:10 +0000 | [diff] [blame] | 7855 | if (PointerType *PTy = dyn_cast<PointerType>(C->getType())) { |
| 7856 | if (PTy->getElementType()->isStructTy()) |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7857 | C2 = ConstantExpr::getIntegerCast( |
| 7858 | C2, Type::getInt32Ty(C->getContext()), true); |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 7859 | C = ConstantExpr::getGetElementPtr(PTy->getElementType(), C, C2); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7860 | } else |
| 7861 | C = ConstantExpr::getAdd(C, C2); |
| 7862 | } |
| 7863 | return C; |
| 7864 | } |
| 7865 | break; |
| 7866 | } |
| 7867 | case scMulExpr: { |
| 7868 | const SCEVMulExpr *SM = cast<SCEVMulExpr>(V); |
| 7869 | if (Constant *C = BuildConstantFromSCEV(SM->getOperand(0))) { |
| 7870 | // Don't bother with pointers at all. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7871 | if (C->getType()->isPointerTy()) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7872 | for (unsigned i = 1, e = SM->getNumOperands(); i != e; ++i) { |
| 7873 | Constant *C2 = BuildConstantFromSCEV(SM->getOperand(i)); |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7874 | if (!C2 || C2->getType()->isPointerTy()) return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7875 | C = ConstantExpr::getMul(C, C2); |
| 7876 | } |
| 7877 | return C; |
| 7878 | } |
| 7879 | break; |
| 7880 | } |
| 7881 | case scUDivExpr: { |
| 7882 | const SCEVUDivExpr *SU = cast<SCEVUDivExpr>(V); |
| 7883 | if (Constant *LHS = BuildConstantFromSCEV(SU->getLHS())) |
| 7884 | if (Constant *RHS = BuildConstantFromSCEV(SU->getRHS())) |
| 7885 | if (LHS->getType() == RHS->getType()) |
| 7886 | return ConstantExpr::getUDiv(LHS, RHS); |
| 7887 | break; |
| 7888 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 7889 | case scSMaxExpr: |
| 7890 | case scUMaxExpr: |
| 7891 | break; // TODO: smax, umax. |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7892 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7893 | return nullptr; |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7894 | } |
| 7895 | |
Dan Gohman | cc2f1eb | 2009-08-31 21:15:23 +0000 | [diff] [blame] | 7896 | const SCEV *ScalarEvolution::computeSCEVAtScope(const SCEV *V, const Loop *L) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7897 | if (isa<SCEVConstant>(V)) return V; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 7898 | |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 7899 | // If this instruction is evolved from a constant-evolving PHI, compute the |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7900 | // exit value from the loop without using SCEVs. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7901 | if (const SCEVUnknown *SU = dyn_cast<SCEVUnknown>(V)) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7902 | if (Instruction *I = dyn_cast<Instruction>(SU->getValue())) { |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7903 | const Loop *LI = this->LI[I->getParent()]; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7904 | if (LI && LI->getParentLoop() == L) // Looking for loop exit value. |
| 7905 | if (PHINode *PN = dyn_cast<PHINode>(I)) |
| 7906 | if (PN->getParent() == LI->getHeader()) { |
| 7907 | // Okay, there is no closed form solution for the PHI node. Check |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 7908 | // to see if the loop that contains it has a known backedge-taken |
| 7909 | // count. If so, we may be able to force computation of the exit |
| 7910 | // value. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 7911 | const SCEV *BackedgeTakenCount = getBackedgeTakenCount(LI); |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7912 | if (const SCEVConstant *BTCC = |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 7913 | dyn_cast<SCEVConstant>(BackedgeTakenCount)) { |
Sanjoy Das | 4cad61a | 2017-08-01 22:37:58 +0000 | [diff] [blame] | 7914 | |
| 7915 | // This trivial case can show up in some degenerate cases where |
| 7916 | // the incoming IR has not yet been fully simplified. |
| 7917 | if (BTCC->getValue()->isZero()) { |
| 7918 | Value *InitValue = nullptr; |
| 7919 | bool MultipleInitValues = false; |
| 7920 | for (unsigned i = 0; i < PN->getNumIncomingValues(); i++) { |
| 7921 | if (!LI->contains(PN->getIncomingBlock(i))) { |
| 7922 | if (!InitValue) |
| 7923 | InitValue = PN->getIncomingValue(i); |
| 7924 | else if (InitValue != PN->getIncomingValue(i)) { |
| 7925 | MultipleInitValues = true; |
| 7926 | break; |
| 7927 | } |
| 7928 | } |
| 7929 | if (!MultipleInitValues && InitValue) |
| 7930 | return getSCEV(InitValue); |
| 7931 | } |
| 7932 | } |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7933 | // Okay, we know how many times the containing loop executes. If |
| 7934 | // this is a constant evolving PHI node, get the final value at |
| 7935 | // the specified iteration number. |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 7936 | Constant *RV = |
| 7937 | getConstantEvolutionLoopExitValue(PN, BTCC->getAPInt(), LI); |
Dan Gohman | 9d203c6 | 2009-06-29 21:31:18 +0000 | [diff] [blame] | 7938 | if (RV) return getSCEV(RV); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7939 | } |
| 7940 | } |
| 7941 | |
Reid Spencer | e6328ca | 2006-12-04 21:33:23 +0000 | [diff] [blame] | 7942 | // Okay, this is an expression that we cannot symbolically evaluate |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7943 | // into a SCEV. Check to see if it's possible to symbolically evaluate |
Reid Spencer | e6328ca | 2006-12-04 21:33:23 +0000 | [diff] [blame] | 7944 | // the arguments into constants, and if so, try to constant propagate the |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7945 | // result. This is particularly useful for computing loop exit values. |
| 7946 | if (CanConstantFold(I)) { |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7947 | SmallVector<Constant *, 4> Operands; |
| 7948 | bool MadeImprovement = false; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 7949 | for (Value *Op : I->operands()) { |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7950 | if (Constant *C = dyn_cast<Constant>(Op)) { |
| 7951 | Operands.push_back(C); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7952 | continue; |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7953 | } |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7954 | |
| 7955 | // If any of the operands is non-constant and of a type that is neither |
| 7956 | // integer nor pointer, don't even try to analyze it with SCEV |
| 7957 | // techniques. |
| 7958 | if (!isSCEVable(Op->getType())) |
| 7959 | return V; |
| 7960 | |
| 7961 | const SCEV *OrigV = getSCEV(Op); |
| 7962 | const SCEV *OpV = getSCEVAtScope(OrigV, L); |
| 7963 | MadeImprovement |= OrigV != OpV; |
| 7964 | |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7965 | Constant *C = BuildConstantFromSCEV(OpV); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7966 | if (!C) return V; |
| 7967 | if (C->getType() != Op->getType()) |
| 7968 | C = ConstantExpr::getCast(CastInst::getCastOpcode(C, false, |
| 7969 | Op->getType(), |
| 7970 | false), |
| 7971 | C, Op->getType()); |
| 7972 | Operands.push_back(C); |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7973 | } |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 7974 | |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7975 | // Check to see if getSCEVAtScope actually made an improvement. |
| 7976 | if (MadeImprovement) { |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 7977 | Constant *C = nullptr; |
Sanjoy Das | 49edd3b | 2015-10-27 00:52:09 +0000 | [diff] [blame] | 7978 | const DataLayout &DL = getDataLayout(); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7979 | if (const CmpInst *CI = dyn_cast<CmpInst>(I)) |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 7980 | C = ConstantFoldCompareInstOperands(CI->getPredicate(), Operands[0], |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 7981 | Operands[1], DL, &TLI); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7982 | else if (const LoadInst *LI = dyn_cast<LoadInst>(I)) { |
| 7983 | if (!LI->isVolatile()) |
Eduard Burtescu | 1423921 | 2016-01-22 01:17:26 +0000 | [diff] [blame] | 7984 | C = ConstantFoldLoadFromConstPtr(Operands[0], LI->getType(), DL); |
Nick Lewycky | a6674c7 | 2011-10-22 19:58:20 +0000 | [diff] [blame] | 7985 | } else |
Manuel Jacob | e902459 | 2016-01-21 06:33:22 +0000 | [diff] [blame] | 7986 | C = ConstantFoldInstOperands(I, Operands, DL, &TLI); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7987 | if (!C) return V; |
Dan Gohman | 4aad750 | 2010-02-24 19:31:47 +0000 | [diff] [blame] | 7988 | return getSCEV(C); |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 7989 | } |
Chris Lattner | dd73047 | 2004-04-17 22:58:41 +0000 | [diff] [blame] | 7990 | } |
| 7991 | } |
| 7992 | |
| 7993 | // This is some other type of SCEVUnknown, just return it. |
| 7994 | return V; |
| 7995 | } |
| 7996 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 7997 | if (const SCEVCommutativeExpr *Comm = dyn_cast<SCEVCommutativeExpr>(V)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 7998 | // Avoid performing the look-up in the common case where the specified |
| 7999 | // expression has no loop-variant portions. |
| 8000 | for (unsigned i = 0, e = Comm->getNumOperands(); i != e; ++i) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8001 | const SCEV *OpAtScope = getSCEVAtScope(Comm->getOperand(i), L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8002 | if (OpAtScope != Comm->getOperand(i)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8003 | // Okay, at least one of these operands is loop variant but might be |
| 8004 | // foldable. Build a new instance of the folded commutative expression. |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 8005 | SmallVector<const SCEV *, 8> NewOps(Comm->op_begin(), |
| 8006 | Comm->op_begin()+i); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8007 | NewOps.push_back(OpAtScope); |
| 8008 | |
| 8009 | for (++i; i != e; ++i) { |
| 8010 | OpAtScope = getSCEVAtScope(Comm->getOperand(i), L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8011 | NewOps.push_back(OpAtScope); |
| 8012 | } |
| 8013 | if (isa<SCEVAddExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8014 | return getAddExpr(NewOps); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 8015 | if (isa<SCEVMulExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8016 | return getMulExpr(NewOps); |
Nick Lewycky | cdb7e54 | 2007-11-25 22:41:31 +0000 | [diff] [blame] | 8017 | if (isa<SCEVSMaxExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8018 | return getSMaxExpr(NewOps); |
Nick Lewycky | 1c44ebc | 2008-02-20 06:48:22 +0000 | [diff] [blame] | 8019 | if (isa<SCEVUMaxExpr>(Comm)) |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8020 | return getUMaxExpr(NewOps); |
Torok Edwin | fbcc663 | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 8021 | llvm_unreachable("Unknown commutative SCEV type!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8022 | } |
| 8023 | } |
| 8024 | // If we got here, all operands are loop invariant. |
| 8025 | return Comm; |
| 8026 | } |
| 8027 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8028 | if (const SCEVUDivExpr *Div = dyn_cast<SCEVUDivExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8029 | const SCEV *LHS = getSCEVAtScope(Div->getLHS(), L); |
| 8030 | const SCEV *RHS = getSCEVAtScope(Div->getRHS(), L); |
Nick Lewycky | 5234830 | 2009-01-13 09:18:58 +0000 | [diff] [blame] | 8031 | if (LHS == Div->getLHS() && RHS == Div->getRHS()) |
| 8032 | return Div; // must be loop invariant |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8033 | return getUDivExpr(LHS, RHS); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8034 | } |
| 8035 | |
| 8036 | // If this is a loop recurrence for a loop that does not contain L, then we |
| 8037 | // are dealing with the final value computed by the loop. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8038 | if (const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(V)) { |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 8039 | // First, attempt to evaluate each operand. |
| 8040 | // Avoid performing the look-up in the common case where the specified |
| 8041 | // expression has no loop-variant portions. |
| 8042 | for (unsigned i = 0, e = AddRec->getNumOperands(); i != e; ++i) { |
| 8043 | const SCEV *OpAtScope = getSCEVAtScope(AddRec->getOperand(i), L); |
| 8044 | if (OpAtScope == AddRec->getOperand(i)) |
| 8045 | continue; |
| 8046 | |
| 8047 | // Okay, at least one of these operands is loop variant but might be |
| 8048 | // foldable. Build a new instance of the folded add recurrence. |
| 8049 | SmallVector<const SCEV *, 8> NewOps(AddRec->op_begin(), |
| 8050 | AddRec->op_begin()+i); |
| 8051 | NewOps.push_back(OpAtScope); |
| 8052 | for (++i; i != e; ++i) |
| 8053 | NewOps.push_back(getSCEVAtScope(AddRec->getOperand(i), L)); |
| 8054 | |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 8055 | const SCEV *FoldedRec = |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8056 | getAddRecExpr(NewOps, AddRec->getLoop(), |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 8057 | AddRec->getNoWrapFlags(SCEV::FlagNW)); |
| 8058 | AddRec = dyn_cast<SCEVAddRecExpr>(FoldedRec); |
Andrew Trick | 01eff82 | 2011-04-27 05:42:17 +0000 | [diff] [blame] | 8059 | // The addrec may be folded to a nonrecurrence, for example, if the |
| 8060 | // induction variable is multiplied by zero after constant folding. Go |
| 8061 | // ahead and return the folded value. |
Andrew Trick | 759ba08 | 2011-04-27 01:21:25 +0000 | [diff] [blame] | 8062 | if (!AddRec) |
| 8063 | return FoldedRec; |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 8064 | break; |
| 8065 | } |
| 8066 | |
| 8067 | // If the scope is outside the addrec's loop, evaluate it by using the |
| 8068 | // loop exit value of the addrec. |
| 8069 | if (!AddRec->getLoop()->contains(L)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8070 | // To evaluate this recurrence, we need to know how many times the AddRec |
| 8071 | // loop iterates. Compute this now. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8072 | const SCEV *BackedgeTakenCount = getBackedgeTakenCount(AddRec->getLoop()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 8073 | if (BackedgeTakenCount == getCouldNotCompute()) return AddRec; |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8074 | |
Eli Friedman | 61f6762 | 2008-08-04 23:49:06 +0000 | [diff] [blame] | 8075 | // Then, evaluate the AddRec. |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8076 | return AddRec->evaluateAtIteration(BackedgeTakenCount, *this); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8077 | } |
Dan Gohman | ae36b1e | 2010-06-29 23:43:06 +0000 | [diff] [blame] | 8078 | |
Dan Gohman | 8ca0885 | 2009-05-24 23:25:42 +0000 | [diff] [blame] | 8079 | return AddRec; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8080 | } |
| 8081 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8082 | if (const SCEVZeroExtendExpr *Cast = dyn_cast<SCEVZeroExtendExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8083 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 8084 | if (Op == Cast->getOperand()) |
| 8085 | return Cast; // must be loop invariant |
| 8086 | return getZeroExtendExpr(Op, Cast->getType()); |
| 8087 | } |
| 8088 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8089 | if (const SCEVSignExtendExpr *Cast = dyn_cast<SCEVSignExtendExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8090 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 8091 | if (Op == Cast->getOperand()) |
| 8092 | return Cast; // must be loop invariant |
| 8093 | return getSignExtendExpr(Op, Cast->getType()); |
| 8094 | } |
| 8095 | |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8096 | if (const SCEVTruncateExpr *Cast = dyn_cast<SCEVTruncateExpr>(V)) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8097 | const SCEV *Op = getSCEVAtScope(Cast->getOperand(), L); |
Dan Gohman | 0098d01 | 2009-04-29 22:29:01 +0000 | [diff] [blame] | 8098 | if (Op == Cast->getOperand()) |
| 8099 | return Cast; // must be loop invariant |
| 8100 | return getTruncateExpr(Op, Cast->getType()); |
| 8101 | } |
| 8102 | |
Torok Edwin | fbcc663 | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 8103 | llvm_unreachable("Unknown SCEV type!"); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8104 | } |
| 8105 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8106 | const SCEV *ScalarEvolution::getSCEVAtScope(Value *V, const Loop *L) { |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8107 | return getSCEVAtScope(getSCEV(V), L); |
| 8108 | } |
| 8109 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 8110 | /// Finds the minimum unsigned root of the following equation: |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8111 | /// |
| 8112 | /// A * X = B (mod N) |
| 8113 | /// |
| 8114 | /// where N = 2^BW and BW is the common bit width of A and B. The signedness of |
| 8115 | /// A and B isn't important. |
| 8116 | /// |
| 8117 | /// If the equation does not have a solution, SCEVCouldNotCompute is returned. |
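| | /// |
| | /// As a small worked example: for BW = 4 (N = 16), A = 4 and B = 8, we have |
| | /// D = gcd(4, 16) = 4, B is divisible by D, the multiplicative inverse of |
| | /// A/D = 1 modulo N/D = 4 is 1, and the minimum root is |
| | /// (1 * 8 mod 16) / 4 = 2; indeed 4 * 2 = 8 (mod 16). |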
Eli Friedman | 10d1ff6 | 2017-01-31 00:42:42 +0000 | [diff] [blame] | 8118 | static const SCEV *SolveLinEquationWithOverflow(const APInt &A, const SCEV *B, |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8119 | ScalarEvolution &SE) { |
| 8120 | uint32_t BW = A.getBitWidth(); |
Eli Friedman | 10d1ff6 | 2017-01-31 00:42:42 +0000 | [diff] [blame] | 8121 | assert(BW == SE.getTypeSizeInBits(B->getType())); |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8122 | assert(A != 0 && "A must be non-zero."); |
| 8123 | |
| 8124 | // 1. D = gcd(A, N) |
| 8125 | // |
| 8126 | // The gcd of A and N may have only one prime factor: 2. The number of |
| 8127 | // trailing zeros in A is its multiplicity |
| 8128 | uint32_t Mult2 = A.countTrailingZeros(); |
| 8129 | // D = 2^Mult2 |
| 8130 | |
| 8131 | // 2. Check if B is divisible by D. |
| 8132 | // |
| 8133 | // B is divisible by D if and only if the multiplicity of prime factor 2 for B |
| 8134 | // is not less than multiplicity of this prime factor for D. |
Eli Friedman | 10d1ff6 | 2017-01-31 00:42:42 +0000 | [diff] [blame] | 8135 | if (SE.GetMinTrailingZeros(B) < Mult2) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 8136 | return SE.getCouldNotCompute(); |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8137 | |
| 8138 | // 3. Compute I: the multiplicative inverse of (A / D) in arithmetic |
| 8139 | // modulo (N / D). |
| 8140 | // |
Eli Friedman | b5c3a0d | 2017-01-12 20:21:00 +0000 | [diff] [blame] | 8141 | // If D == 1, (N / D) == N == 2^BW, so we need one extra bit to represent |
| 8142 | // (N / D) in general. The inverse itself always fits into BW bits, though, |
| 8143 | // so we immediately truncate it. |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8144 | APInt AD = A.lshr(Mult2).zext(BW + 1); // AD = A / D |
| 8145 | APInt Mod(BW + 1, 0); |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 8146 | Mod.setBit(BW - Mult2); // Mod = N / D |
Eli Friedman | b5c3a0d | 2017-01-12 20:21:00 +0000 | [diff] [blame] | 8147 | APInt I = AD.multiplicativeInverse(Mod).trunc(BW); |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8148 | |
| 8149 | // 4. Compute the minimum unsigned root of the equation: |
| 8150 | // I * (B / D) mod (N / D) |
Eli Friedman | b5c3a0d | 2017-01-12 20:21:00 +0000 | [diff] [blame] | 8151 | // To simplify the computation, we factor out the divide by D: |
| 8152 | // (I * B mod N) / D |
Eli Friedman | 10d1ff6 | 2017-01-31 00:42:42 +0000 | [diff] [blame] | 8153 | const SCEV *D = SE.getConstant(APInt::getOneBitSet(BW, Mult2)); |
| 8154 | return SE.getUDivExactExpr(SE.getMulExpr(B, SE.getConstant(I)), D); |
Wojciech Matyjewicz | f0d21cd | 2008-07-20 15:55:14 +0000 | [diff] [blame] | 8155 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8156 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 8157 | /// Find the roots of the quadratic equation for the given quadratic chrec |
 | 8158 | /// {L,+,M,+,N}. This returns either the two roots (which might be the same) or 
 | 8159 | /// None if the roots cannot be computed. 
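///
/// As an illustrative (hypothetical) instance: the chrec {-4,+,1,+,2}
/// evaluates to -4 + X + 2*(X*(X-1)/2) = X^2 - 4 at iteration X, so the
/// polynomial coefficients computed below come out as A = 1, B = 0, C = -4,
/// and the roots are X = 2 and X = -2.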
Sanjoy Das | 5a3d893 | 2016-06-15 04:37:47 +0000 | [diff] [blame] | 8160 | static Optional<std::pair<const SCEVConstant *,const SCEVConstant *>> |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 8161 | SolveQuadraticEquation(const SCEVAddRecExpr *AddRec, ScalarEvolution &SE) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8162 | assert(AddRec->getNumOperands() == 3 && "This is not a quadratic chrec!"); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8163 | const SCEVConstant *LC = dyn_cast<SCEVConstant>(AddRec->getOperand(0)); |
| 8164 | const SCEVConstant *MC = dyn_cast<SCEVConstant>(AddRec->getOperand(1)); |
| 8165 | const SCEVConstant *NC = dyn_cast<SCEVConstant>(AddRec->getOperand(2)); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8166 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8167 | // We currently can only solve this if the coefficients are constants. |
Sanjoy Das | 5a3d893 | 2016-06-15 04:37:47 +0000 | [diff] [blame] | 8168 | if (!LC || !MC || !NC) |
| 8169 | return None; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8170 | |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 8171 | uint32_t BitWidth = LC->getAPInt().getBitWidth(); |
| 8172 | const APInt &L = LC->getAPInt(); |
| 8173 | const APInt &M = MC->getAPInt(); |
| 8174 | const APInt &N = NC->getAPInt(); |
Reid Spencer | 983e3b3 | 2007-03-01 07:25:48 +0000 | [diff] [blame] | 8175 | APInt Two(BitWidth, 2); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8176 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8177 | // Convert from chrec coefficients to polynomial coefficients AX^2+BX+C |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8178 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8179 | // The A coefficient is N/2 |
Craig Topper | 716cad8 | 2017-05-15 18:14:16 +0000 | [diff] [blame] | 8180 | APInt A = N.sdiv(Two); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8181 | |
Craig Topper | e3e1a35 | 2017-05-11 06:48:54 +0000 | [diff] [blame] | 8182 | // The B coefficient is M-N/2 |
Craig Topper | 716cad8 | 2017-05-15 18:14:16 +0000 | [diff] [blame] | 8183 | APInt B = M; |
Craig Topper | e3e1a35 | 2017-05-11 06:48:54 +0000 | [diff] [blame] | 8184 | B -= A; // A is the same as N/2. |
| 8185 | |
| 8186 | // The C coefficient is L. |
| 8187 | const APInt& C = L; |
| 8188 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8189 | // Compute the B^2-4ac term. |
Craig Topper | 716cad8 | 2017-05-15 18:14:16 +0000 | [diff] [blame] | 8190 | APInt SqrtTerm = B; |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8191 | SqrtTerm *= B; |
| 8192 | SqrtTerm -= 4 * (A * C); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8193 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8194 | if (SqrtTerm.isNegative()) { |
| 8195 | // The loop is provably infinite. |
| 8196 | return None; |
| 8197 | } |
Nick Lewycky | fb78083 | 2012-08-01 09:14:36 +0000 | [diff] [blame] | 8198 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8199 | // Compute sqrt(B^2-4ac). This is guaranteed to be the nearest |
| 8200 | // integer value or else APInt::sqrt() will assert. |
Craig Topper | 716cad8 | 2017-05-15 18:14:16 +0000 | [diff] [blame] | 8201 | APInt SqrtVal = SqrtTerm.sqrt(); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8202 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8203 | // Compute the two solutions for the quadratic formula. |
| 8204 | // The divisions must be performed as signed divisions. |
Craig Topper | 716cad8 | 2017-05-15 18:14:16 +0000 | [diff] [blame] | 8205 | APInt NegB = -std::move(B); |
| 8206 | APInt TwoA = std::move(A); |
Craig Topper | e3e1a35 | 2017-05-11 06:48:54 +0000 | [diff] [blame] | 8207 | TwoA <<= 1; |
| 8208 | if (TwoA.isNullValue()) |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8209 | return None; |
Nick Lewycky | 7b14e20 | 2008-11-03 02:43:49 +0000 | [diff] [blame] | 8210 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8211 | LLVMContext &Context = SE.getContext(); |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 8212 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8213 | ConstantInt *Solution1 = |
| 8214 | ConstantInt::get(Context, (NegB + SqrtVal).sdiv(TwoA)); |
| 8215 | ConstantInt *Solution2 = |
| 8216 | ConstantInt::get(Context, (NegB - SqrtVal).sdiv(TwoA)); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8217 | |
Craig Topper | 6694a4e | 2017-05-11 06:48:51 +0000 | [diff] [blame] | 8218 | return std::make_pair(cast<SCEVConstant>(SE.getConstant(Solution1)), |
| 8219 | cast<SCEVConstant>(SE.getConstant(Solution2))); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8220 | } |
| 8221 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 8222 | ScalarEvolution::ExitLimit |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 8223 | ScalarEvolution::howFarToZero(const SCEV *V, const Loop *L, bool ControlsExit, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 8224 | bool AllowPredicates) { |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 8225 | |
| 8226 | // This is only used for loops with a "x != y" exit test. The exit condition |
| 8227 | // is now expressed as a single expression, V = x-y. So the exit test is |
| 8228 | // effectively V != 0. We know and take advantage of the fact that this |
 | 8229 |   // expression is only used in a comparison-by-zero context. 
| 8230 | |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 8231 | SmallPtrSet<const SCEVPredicate *, 4> Predicates; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8232 | // If the value is a constant |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8233 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(V)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8234 | // If the value is already zero, the branch will execute zero times. |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 8235 | if (C->getValue()->isZero()) return C; |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 8236 | return getCouldNotCompute(); // Otherwise it will loop infinitely. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8237 | } |
| 8238 | |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 8239 | const SCEVAddRecExpr *AddRec = dyn_cast<SCEVAddRecExpr>(V); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 8240 | if (!AddRec && AllowPredicates) |
| 8241 | // Try to make this an AddRec using runtime tests, in the first X |
| 8242 | // iterations of this loop, where X is the SCEV expression found by the |
| 8243 | // algorithm below. |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 8244 | AddRec = convertSCEVToAddRecWithPredicates(V, L, Predicates); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 8245 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8246 | if (!AddRec || AddRec->getLoop() != L) |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 8247 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8248 | |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 8249 | // If this is a quadratic (3-term) AddRec {L,+,M,+,N}, find the roots of |
| 8250 | // the quadratic equation to solve it. |
| 8251 | if (AddRec->isQuadratic() && AddRec->getType()->isIntegerTy()) { |
Sanjoy Das | 5a3d893 | 2016-06-15 04:37:47 +0000 | [diff] [blame] | 8252 | if (auto Roots = SolveQuadraticEquation(AddRec, *this)) { |
| 8253 | const SCEVConstant *R1 = Roots->first; |
| 8254 | const SCEVConstant *R2 = Roots->second; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8255 | // Pick the smallest positive root value. |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 8256 | if (ConstantInt *CB = dyn_cast<ConstantInt>(ConstantExpr::getICmp( |
| 8257 | CmpInst::ICMP_ULT, R1->getValue(), R2->getValue()))) { |
David Blaikie | dc3f01e | 2015-03-09 01:57:13 +0000 | [diff] [blame] | 8258 | if (!CB->getZExtValue()) |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 8259 | std::swap(R1, R2); // R1 is the minimum root now. |
Andrew Trick | 2a3b716 | 2011-03-09 17:23:39 +0000 | [diff] [blame] | 8260 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8261 | // We can only use this value if the chrec ends up with an exact zero |
| 8262 | // value at this index. When solving for "X*X != 5", for example, we |
| 8263 | // should not accept a root of 2. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8264 | const SCEV *Val = AddRec->evaluateAtIteration(R1, *this); |
Dan Gohman | be928e3 | 2008-06-18 16:23:07 +0000 | [diff] [blame] | 8265 | if (Val->isZero()) |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 8266 | // We found a quadratic root! |
| 8267 | return ExitLimit(R1, R1, false, Predicates); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8268 | } |
| 8269 | } |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 8270 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8271 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8272 | |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 8273 | // Otherwise we can only handle this if it is affine. |
| 8274 | if (!AddRec->isAffine()) |
| 8275 | return getCouldNotCompute(); |
| 8276 | |
| 8277 | // If this is an affine expression, the execution count of this branch is |
| 8278 | // the minimum unsigned root of the following equation: |
| 8279 | // |
| 8280 | // Start + Step*N = 0 (mod 2^BW) |
| 8281 | // |
| 8282 | // equivalent to: |
| 8283 | // |
| 8284 | // Step*N = -Start (mod 2^BW) |
| 8285 | // |
| 8286 | // where BW is the common bit width of Start and Step. |
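  //
  // For instance (hypothetical 8-bit values): with Start = 4 and Step = 6 the
  // equation is 6*N = -4 = 252 (mod 256), whose minimum unsigned root is
  // N = 42, since 4 + 6*42 = 256 = 0 (mod 2^8).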
| 8287 | |
| 8288 | // Get the initial value for the loop. |
| 8289 | const SCEV *Start = getSCEVAtScope(AddRec->getStart(), L->getParentLoop()); |
| 8290 | const SCEV *Step = getSCEVAtScope(AddRec->getOperand(1), L->getParentLoop()); |
| 8291 | |
| 8292 | // For now we handle only constant steps. |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8293 | // |
| 8294 | // TODO: Handle a nonconstant Step given AddRec<NUW>. If the |
| 8295 | // AddRec is NUW, then (in an unsigned sense) it cannot be counting up to wrap |
| 8296 | // to 0, it must be counting down to equal 0. Consequently, N = Start / -Step. |
| 8297 | // We have not yet seen any such cases. |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 8298 | const SCEVConstant *StepC = dyn_cast<SCEVConstant>(Step); |
Craig Topper | ca2c876 | 2017-07-06 18:39:49 +0000 | [diff] [blame] | 8299 | if (!StepC || StepC->getValue()->isZero()) |
Chris Lattner | dff679f | 2011-01-09 22:39:48 +0000 | [diff] [blame] | 8300 | return getCouldNotCompute(); |
| 8301 | |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8302 | // For positive steps (counting up until unsigned overflow): |
| 8303 | // N = -Start/Step (as unsigned) |
| 8304 | // For negative steps (counting down to zero): |
| 8305 | // N = Start/-Step |
| 8306 | // First compute the unsigned distance from zero in the direction of Step. |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 8307 | bool CountDown = StepC->getAPInt().isNegative(); |
Andrew Trick | f1781db | 2011-03-14 17:28:02 +0000 | [diff] [blame] | 8308 | const SCEV *Distance = CountDown ? Start : getNegativeSCEV(Start); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8309 | |
| 8310 | // Handle unitary steps, which cannot wraparound. |
Andrew Trick | f1781db | 2011-03-14 17:28:02 +0000 | [diff] [blame] | 8311 | // 1*N = -Start; -1*N = Start (mod 2^BW), so: |
| 8312 | // N = Distance (as unsigned) |
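  // For instance (hypothetical 8-bit values): Start = 250 with Step = 1 gives
  // N = -250 (mod 256) = 6, and Start = 6 with Step = -1 likewise gives N = 6.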
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 8313 | if (StepC->getValue()->isOne() || StepC->getValue()->isMinusOne()) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8314 | APInt MaxBECount = getUnsignedRangeMax(Distance); |
Eli Friedman | bd6deda | 2017-01-11 21:07:15 +0000 | [diff] [blame] | 8315 | |
| 8316 | // When a loop like "for (int i = 0; i != n; ++i) { /* body */ }" is rotated, |
| 8317 | // we end up with a loop whose backedge-taken count is n - 1. Detect this |
| 8318 | // case, and see if we can improve the bound. |
| 8319 | // |
| 8320 | // Explicitly handling this here is necessary because getUnsignedRange |
| 8321 | // isn't context-sensitive; it doesn't know that we only care about the |
| 8322 | // range inside the loop. |
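    //
    // For instance (hypothetical, 32-bit): if Distance is "n - 1" and the loop
    // is only entered when n != 0, the bound improves from UINT32_MAX to
    // UINT32_MAX - 1, because n - 1 can only equal UINT32_MAX when n is 0.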
| 8323 | const SCEV *Zero = getZero(Distance->getType()); |
| 8324 | const SCEV *One = getOne(Distance->getType()); |
| 8325 | const SCEV *DistancePlusOne = getAddExpr(Distance, One); |
| 8326 | if (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_NE, DistancePlusOne, Zero)) { |
| 8327 | // If Distance + 1 doesn't overflow, we can compute the maximum distance |
| 8328 | // as "unsigned_max(Distance + 1) - 1". |
| 8329 | ConstantRange CR = getUnsignedRange(DistancePlusOne); |
| 8330 | MaxBECount = APIntOps::umin(MaxBECount, CR.getUnsignedMax() - 1); |
| 8331 | } |
Eli Friedman | 8396265 | 2017-01-11 20:55:48 +0000 | [diff] [blame] | 8332 | return ExitLimit(Distance, getConstant(MaxBECount), false, Predicates); |
Nick Lewycky | 3155552 | 2011-10-03 07:10:45 +0000 | [diff] [blame] | 8333 | } |
Andrew Trick | 2a3b716 | 2011-03-09 17:23:39 +0000 | [diff] [blame] | 8334 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 8335 | // If the condition controls loop exit (the loop exits only if the expression |
 | 8336 |   // is true) and the addition is no-wrap, we can use unsigned divide to 
| 8337 | // compute the backedge count. In this case, the step may not divide the |
| 8338 | // distance, but we don't care because if the condition is "missed" the loop |
| 8339 | // will have undefined behavior due to wrapping. |
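  // For instance (hypothetical values): with Distance = 7 and Step = 2 the
  // division yields 3 even though 2 does not divide 7; an execution that
  // actually "missed" zero would have to keep stepping until the recurrence
  // self-wrapped, which the no-wrap flag rules out, so it can be ignored.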
Sanjoy Das | c7f69b9 | 2016-06-09 01:13:59 +0000 | [diff] [blame] | 8340 | if (ControlsExit && AddRec->hasNoSelfWrap() && |
| 8341 | loopHasNoAbnormalExits(AddRec->getLoop())) { |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 8342 | const SCEV *Exact = |
| 8343 | getUDivExpr(Distance, CountDown ? getNegativeSCEV(Step) : Step); |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 8344 | const SCEV *Max = |
| 8345 | Exact == getCouldNotCompute() |
| 8346 | ? Exact |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8347 | : getConstant(getUnsignedRangeMax(Exact)); |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 8348 | return ExitLimit(Exact, Max, false, Predicates); |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 8349 | } |
Benjamin Kramer | e75eaca | 2014-03-25 16:25:12 +0000 | [diff] [blame] | 8350 | |
Eli Friedman | 10d1ff6 | 2017-01-31 00:42:42 +0000 | [diff] [blame] | 8351 | // Solve the general equation. |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 8352 | const SCEV *E = SolveLinEquationWithOverflow(StepC->getAPInt(), |
| 8353 | getNegativeSCEV(Start), *this); |
| 8354 | const SCEV *M = E == getCouldNotCompute() |
| 8355 | ? E |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8356 | : getConstant(getUnsignedRangeMax(E)); |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 8357 | return ExitLimit(E, M, false, Predicates); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8358 | } |
| 8359 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 8360 | ScalarEvolution::ExitLimit |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 8361 | ScalarEvolution::howFarToNonZero(const SCEV *V, const Loop *L) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8362 | // Loops that look like: while (X == 0) are very strange indeed. We don't |
| 8363 | // handle them yet except for the trivial case. This could be expanded in the |
| 8364 | // future as needed. |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8365 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8366 | // If the value is a constant, check to see if it is known to be non-zero |
| 8367 | // already. If so, the backedge will execute zero times. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 8368 | if (const SCEVConstant *C = dyn_cast<SCEVConstant>(V)) { |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 8369 | if (!C->getValue()->isZero()) |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 8370 | return getZero(C->getType()); |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 8371 | return getCouldNotCompute(); // Otherwise it will loop infinitely. |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8372 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 8373 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8374 | // We could implement others, but I really doubt anyone writes loops like |
| 8375 | // this, and if they did, they would already be constant folded. |
Dan Gohman | c5c85c0 | 2009-06-27 21:21:31 +0000 | [diff] [blame] | 8376 | return getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 8377 | } |
| 8378 | |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 8379 | std::pair<BasicBlock *, BasicBlock *> |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 8380 | ScalarEvolution::getPredecessorWithUniqueSuccessorForBB(BasicBlock *BB) { |
Dan Gohman | fa066ef | 2009-04-30 20:48:53 +0000 | [diff] [blame] | 8381 | // If the block has a unique predecessor, then there is no path from the |
| 8382 | // predecessor to the block that does not go through the direct edge |
| 8383 | // from the predecessor to the block. |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 8384 | if (BasicBlock *Pred = BB->getSinglePredecessor()) |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 8385 | return {Pred, BB}; |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 8386 | |
| 8387 | // A loop's header is defined to be a block that dominates the loop. |
Dan Gohman | 8c77f1a | 2009-05-18 15:36:09 +0000 | [diff] [blame] | 8388 | // If the header has a unique predecessor outside the loop, it must be |
| 8389 | // a block that has exactly one successor that can reach the loop. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 8390 | if (Loop *L = LI.getLoopFor(BB)) |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 8391 | return {L->getLoopPredecessor(), L->getHeader()}; |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 8392 | |
Sanjoy Das | c42f7cc | 2016-02-20 01:35:56 +0000 | [diff] [blame] | 8393 | return {nullptr, nullptr}; |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 8394 | } |
| 8395 | |
Sanjoy Das | f857081 | 2016-05-29 00:38:22 +0000 | [diff] [blame] | 8396 | /// SCEV structural equivalence is usually sufficient for testing whether two |
 | 8397 | /// expressions are equal; however, for the purposes of looking for a condition 
| 8398 | /// guarding a loop, it can be useful to be a little more general, since a |
| 8399 | /// front-end may have replicated the controlling expression. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 8400 | static bool HasSameValue(const SCEV *A, const SCEV *B) { |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 8401 | // Quick check to see if they are the same SCEV. |
| 8402 | if (A == B) return true; |
| 8403 | |
Sanjoy Das | f1090b6 | 2015-09-27 21:09:48 +0000 | [diff] [blame] | 8404 | auto ComputesEqualValues = [](const Instruction *A, const Instruction *B) { |
| 8405 | // Not all instructions that are "identical" compute the same value. For |
| 8406 | // instance, two distinct alloca instructions allocating the same type are |
 | 8407 |     // identical and do not read memory, but they compute distinct values. 
| 8408 | return A->isIdenticalTo(B) && (isa<BinaryOperator>(A) || isa<GetElementPtrInst>(A)); |
| 8409 | }; |
| 8410 | |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 8411 | // Otherwise, if they're both SCEVUnknown, it's possible that they hold |
| 8412 | // two different instructions with the same value. Check for this case. |
| 8413 | if (const SCEVUnknown *AU = dyn_cast<SCEVUnknown>(A)) |
| 8414 | if (const SCEVUnknown *BU = dyn_cast<SCEVUnknown>(B)) |
| 8415 | if (const Instruction *AI = dyn_cast<Instruction>(AU->getValue())) |
| 8416 | if (const Instruction *BI = dyn_cast<Instruction>(BU->getValue())) |
Sanjoy Das | f1090b6 | 2015-09-27 21:09:48 +0000 | [diff] [blame] | 8417 | if (ComputesEqualValues(AI, BI)) |
Dan Gohman | 450f4e0 | 2009-06-20 00:35:32 +0000 | [diff] [blame] | 8418 | return true; |
| 8419 | |
| 8420 | // Otherwise assume they may have a different value. |
| 8421 | return false; |
| 8422 | } |
| 8423 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8424 | bool ScalarEvolution::SimplifyICmpOperands(ICmpInst::Predicate &Pred, |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 8425 | const SCEV *&LHS, const SCEV *&RHS, |
| 8426 | unsigned Depth) { |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8427 | bool Changed = false; |
| 8428 | |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 8429 | // If we hit the max recursion limit bail out. |
| 8430 | if (Depth >= 3) |
| 8431 | return false; |
| 8432 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8433 | // Canonicalize a constant to the right side. |
| 8434 | if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(LHS)) { |
| 8435 | // Check for both operands constant. |
| 8436 | if (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(RHS)) { |
| 8437 | if (ConstantExpr::getICmp(Pred, |
| 8438 | LHSC->getValue(), |
| 8439 | RHSC->getValue())->isNullValue()) |
| 8440 | goto trivially_false; |
| 8441 | else |
| 8442 | goto trivially_true; |
| 8443 | } |
| 8444 | // Otherwise swap the operands to put the constant on the right. |
| 8445 | std::swap(LHS, RHS); |
| 8446 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 8447 | Changed = true; |
| 8448 | } |
| 8449 | |
| 8450 | // If we're comparing an addrec with a value which is loop-invariant in the |
Dan Gohman | df564ca | 2010-05-03 17:00:11 +0000 | [diff] [blame] | 8451 | // addrec's loop, put the addrec on the left. Also make a dominance check, |
| 8452 | // as both operands could be addrecs loop-invariant in each other's loop. |
| 8453 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(RHS)) { |
| 8454 | const Loop *L = AR->getLoop(); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 8455 | if (isLoopInvariant(LHS, L) && properlyDominates(LHS, L->getHeader())) { |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8456 | std::swap(LHS, RHS); |
| 8457 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 8458 | Changed = true; |
| 8459 | } |
Dan Gohman | df564ca | 2010-05-03 17:00:11 +0000 | [diff] [blame] | 8460 | } |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8461 | |
| 8462 | // If there's a constant operand, canonicalize comparisons with boundary |
| 8463 | // cases, and canonicalize *-or-equal comparisons to regular comparisons. |
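  // For example, "x u< 1" becomes "x == 0" via the exact range below, and
  // "x u>= 5" becomes "x u> 4" via the boundary cases further down.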
| 8464 | if (const SCEVConstant *RC = dyn_cast<SCEVConstant>(RHS)) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 8465 | const APInt &RA = RC->getAPInt(); |
Sanjoy Das | 4aeb0f2 | 2016-10-02 20:59:10 +0000 | [diff] [blame] | 8466 | |
| 8467 | bool SimplifiedByConstantRange = false; |
| 8468 | |
| 8469 | if (!ICmpInst::isEquality(Pred)) { |
| 8470 | ConstantRange ExactCR = ConstantRange::makeExactICmpRegion(Pred, RA); |
| 8471 | if (ExactCR.isFullSet()) |
| 8472 | goto trivially_true; |
| 8473 | else if (ExactCR.isEmptySet()) |
| 8474 | goto trivially_false; |
| 8475 | |
| 8476 | APInt NewRHS; |
| 8477 | CmpInst::Predicate NewPred; |
| 8478 | if (ExactCR.getEquivalentICmp(NewPred, NewRHS) && |
| 8479 | ICmpInst::isEquality(NewPred)) { |
| 8480 | // We were able to convert an inequality to an equality. |
| 8481 | Pred = NewPred; |
| 8482 | RHS = getConstant(NewRHS); |
| 8483 | Changed = SimplifiedByConstantRange = true; |
| 8484 | } |
| 8485 | } |
| 8486 | |
| 8487 | if (!SimplifiedByConstantRange) { |
| 8488 | switch (Pred) { |
| 8489 | default: |
| 8490 | break; |
| 8491 | case ICmpInst::ICMP_EQ: |
| 8492 | case ICmpInst::ICMP_NE: |
| 8493 | // Fold ((-1) * %a) + %b == 0 (equivalent to %b-%a == 0) into %a == %b. |
| 8494 | if (!RA) |
| 8495 | if (const SCEVAddExpr *AE = dyn_cast<SCEVAddExpr>(LHS)) |
| 8496 | if (const SCEVMulExpr *ME = |
| 8497 | dyn_cast<SCEVMulExpr>(AE->getOperand(0))) |
| 8498 | if (AE->getNumOperands() == 2 && ME->getNumOperands() == 2 && |
| 8499 | ME->getOperand(0)->isAllOnesValue()) { |
| 8500 | RHS = AE->getOperand(1); |
| 8501 | LHS = ME->getOperand(1); |
| 8502 | Changed = true; |
| 8503 | } |
| 8504 | break; |
 | 8505 |  
| 8507 | // The "Should have been caught earlier!" messages refer to the fact |
| 8508 | // that the ExactCR.isFullSet() or ExactCR.isEmptySet() check above |
| 8509 | // should have fired on the corresponding cases, and canonicalized the |
| 8510 | // check to trivially_true or trivially_false. |
| 8511 | |
| 8512 | case ICmpInst::ICMP_UGE: |
| 8513 | assert(!RA.isMinValue() && "Should have been caught earlier!"); |
| 8514 | Pred = ICmpInst::ICMP_UGT; |
Sanjoy Das | f230b0a | 2016-10-02 02:40:27 +0000 | [diff] [blame] | 8515 | RHS = getConstant(RA - 1); |
| 8516 | Changed = true; |
| 8517 | break; |
Sanjoy Das | 4aeb0f2 | 2016-10-02 20:59:10 +0000 | [diff] [blame] | 8518 | case ICmpInst::ICMP_ULE: |
| 8519 | assert(!RA.isMaxValue() && "Should have been caught earlier!"); |
| 8520 | Pred = ICmpInst::ICMP_ULT; |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8521 | RHS = getConstant(RA + 1); |
| 8522 | Changed = true; |
| 8523 | break; |
Sanjoy Das | 4aeb0f2 | 2016-10-02 20:59:10 +0000 | [diff] [blame] | 8524 | case ICmpInst::ICMP_SGE: |
| 8525 | assert(!RA.isMinSignedValue() && "Should have been caught earlier!"); |
| 8526 | Pred = ICmpInst::ICMP_SGT; |
Sanjoy Das | f230b0a | 2016-10-02 02:40:27 +0000 | [diff] [blame] | 8527 | RHS = getConstant(RA - 1); |
| 8528 | Changed = true; |
| 8529 | break; |
Sanjoy Das | 4aeb0f2 | 2016-10-02 20:59:10 +0000 | [diff] [blame] | 8530 | case ICmpInst::ICMP_SLE: |
| 8531 | assert(!RA.isMaxSignedValue() && "Should have been caught earlier!"); |
| 8532 | Pred = ICmpInst::ICMP_SLT; |
Sanjoy Das | f230b0a | 2016-10-02 02:40:27 +0000 | [diff] [blame] | 8533 | RHS = getConstant(RA + 1); |
| 8534 | Changed = true; |
| 8535 | break; |
| 8536 | } |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8537 | } |
| 8538 | } |
| 8539 | |
| 8540 | // Check for obvious equality. |
| 8541 | if (HasSameValue(LHS, RHS)) { |
| 8542 | if (ICmpInst::isTrueWhenEqual(Pred)) |
| 8543 | goto trivially_true; |
| 8544 | if (ICmpInst::isFalseWhenEqual(Pred)) |
| 8545 | goto trivially_false; |
| 8546 | } |
| 8547 | |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8548 | // If possible, canonicalize GE/LE comparisons to GT/LT comparisons, by |
| 8549 | // adding or subtracting 1 from one of the operands. |
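  // For example, "x s<= y" becomes "x s< y+1" when y+1 provably cannot
  // overflow, and "x u>= y" becomes "x u> y-1" when y is known non-zero.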
| 8550 | switch (Pred) { |
| 8551 | case ICmpInst::ICMP_SLE: |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8552 | if (!getSignedRangeMax(RHS).isMaxSignedValue()) { |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8553 | RHS = getAddExpr(getConstant(RHS->getType(), 1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8554 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8555 | Pred = ICmpInst::ICMP_SLT; |
| 8556 | Changed = true; |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8557 | } else if (!getSignedRangeMin(LHS).isMinSignedValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 8558 | LHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8559 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8560 | Pred = ICmpInst::ICMP_SLT; |
| 8561 | Changed = true; |
| 8562 | } |
| 8563 | break; |
| 8564 | case ICmpInst::ICMP_SGE: |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8565 | if (!getSignedRangeMin(RHS).isMinSignedValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 8566 | RHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8567 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8568 | Pred = ICmpInst::ICMP_SGT; |
| 8569 | Changed = true; |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8570 | } else if (!getSignedRangeMax(LHS).isMaxSignedValue()) { |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8571 | LHS = getAddExpr(getConstant(RHS->getType(), 1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8572 | SCEV::FlagNSW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8573 | Pred = ICmpInst::ICMP_SGT; |
| 8574 | Changed = true; |
| 8575 | } |
| 8576 | break; |
| 8577 | case ICmpInst::ICMP_ULE: |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8578 | if (!getUnsignedRangeMax(RHS).isMaxValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 8579 | RHS = getAddExpr(getConstant(RHS->getType(), 1, true), RHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8580 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8581 | Pred = ICmpInst::ICMP_ULT; |
| 8582 | Changed = true; |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8583 | } else if (!getUnsignedRangeMin(LHS).isMinValue()) { |
Peter Collingbourne | c85f4ce | 2015-11-20 01:26:13 +0000 | [diff] [blame] | 8584 | LHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), LHS); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8585 | Pred = ICmpInst::ICMP_ULT; |
| 8586 | Changed = true; |
| 8587 | } |
| 8588 | break; |
| 8589 | case ICmpInst::ICMP_UGE: |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8590 | if (!getUnsignedRangeMin(RHS).isMinValue()) { |
Peter Collingbourne | c85f4ce | 2015-11-20 01:26:13 +0000 | [diff] [blame] | 8591 | RHS = getAddExpr(getConstant(RHS->getType(), (uint64_t)-1, true), RHS); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8592 | Pred = ICmpInst::ICMP_UGT; |
| 8593 | Changed = true; |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8594 | } else if (!getUnsignedRangeMax(LHS).isMaxValue()) { |
Dan Gohman | 267700c | 2010-05-03 20:23:47 +0000 | [diff] [blame] | 8595 | LHS = getAddExpr(getConstant(RHS->getType(), 1, true), LHS, |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 8596 | SCEV::FlagNUW); |
Dan Gohman | 81585c1 | 2010-05-03 16:35:17 +0000 | [diff] [blame] | 8597 | Pred = ICmpInst::ICMP_UGT; |
| 8598 | Changed = true; |
| 8599 | } |
| 8600 | break; |
| 8601 | default: |
| 8602 | break; |
| 8603 | } |
| 8604 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8605 | // TODO: More simplifications are possible here. |
| 8606 | |
Benjamin Kramer | 50b26eb | 2012-05-30 18:32:23 +0000 | [diff] [blame] | 8607 | // Recursively simplify until we either hit a recursion limit or nothing |
| 8608 | // changes. |
| 8609 | if (Changed) |
| 8610 | return SimplifyICmpOperands(Pred, LHS, RHS, Depth+1); |
| 8611 | |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8612 | return Changed; |
| 8613 | |
| 8614 | trivially_true: |
| 8615 | // Return 0 == 0. |
Benjamin Kramer | ddd1b7b | 2010-11-20 18:43:35 +0000 | [diff] [blame] | 8616 | LHS = RHS = getConstant(ConstantInt::getFalse(getContext())); |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8617 | Pred = ICmpInst::ICMP_EQ; |
| 8618 | return true; |
| 8619 | |
| 8620 | trivially_false: |
| 8621 | // Return 0 != 0. |
Benjamin Kramer | ddd1b7b | 2010-11-20 18:43:35 +0000 | [diff] [blame] | 8622 | LHS = RHS = getConstant(ConstantInt::getFalse(getContext())); |
Dan Gohman | 48ff3cf | 2010-04-24 01:28:42 +0000 | [diff] [blame] | 8623 | Pred = ICmpInst::ICMP_NE; |
| 8624 | return true; |
| 8625 | } |
| 8626 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8627 | bool ScalarEvolution::isKnownNegative(const SCEV *S) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8628 | return getSignedRangeMax(S).isNegative(); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8629 | } |
| 8630 | |
| 8631 | bool ScalarEvolution::isKnownPositive(const SCEV *S) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8632 | return getSignedRangeMin(S).isStrictlyPositive(); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8633 | } |
| 8634 | |
| 8635 | bool ScalarEvolution::isKnownNonNegative(const SCEV *S) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8636 | return !getSignedRangeMin(S).isNegative(); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8637 | } |
| 8638 | |
| 8639 | bool ScalarEvolution::isKnownNonPositive(const SCEV *S) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 8640 | return !getSignedRangeMax(S).isStrictlyPositive(); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8641 | } |
| 8642 | |
| 8643 | bool ScalarEvolution::isKnownNonZero(const SCEV *S) { |
| 8644 | return isKnownNegative(S) || isKnownPositive(S); |
| 8645 | } |
| 8646 | |
| 8647 | bool ScalarEvolution::isKnownPredicate(ICmpInst::Predicate Pred, |
| 8648 | const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | 36cce7e | 2010-04-24 01:38:36 +0000 | [diff] [blame] | 8649 | // Canonicalize the inputs first. |
| 8650 | (void)SimplifyICmpOperands(Pred, LHS, RHS); |
| 8651 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 8652 | // If LHS or RHS is an addrec, check to see if the condition is true in |
| 8653 | // every iteration of the loop. |
Justin Bogner | cbb8438 | 2014-05-23 00:06:56 +0000 | [diff] [blame] | 8654 | // If LHS and RHS are both addrec, both conditions must be true in |
| 8655 | // every iteration of the loop. |
| 8656 | const SCEVAddRecExpr *LAR = dyn_cast<SCEVAddRecExpr>(LHS); |
| 8657 | const SCEVAddRecExpr *RAR = dyn_cast<SCEVAddRecExpr>(RHS); |
| 8658 | bool LeftGuarded = false; |
| 8659 | bool RightGuarded = false; |
| 8660 | if (LAR) { |
| 8661 | const Loop *L = LAR->getLoop(); |
| 8662 | if (isLoopEntryGuardedByCond(L, Pred, LAR->getStart(), RHS) && |
| 8663 | isLoopBackedgeGuardedByCond(L, Pred, LAR->getPostIncExpr(*this), RHS)) { |
| 8664 | if (!RAR) return true; |
| 8665 | LeftGuarded = true; |
| 8666 | } |
| 8667 | } |
| 8668 | if (RAR) { |
| 8669 | const Loop *L = RAR->getLoop(); |
| 8670 | if (isLoopEntryGuardedByCond(L, Pred, LHS, RAR->getStart()) && |
| 8671 | isLoopBackedgeGuardedByCond(L, Pred, LHS, RAR->getPostIncExpr(*this))) { |
| 8672 | if (!LAR) return true; |
| 8673 | RightGuarded = true; |
| 8674 | } |
| 8675 | } |
| 8676 | if (LeftGuarded && RightGuarded) |
| 8677 | return true; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8678 | |
Sanjoy Das | 7d910f2 | 2015-10-02 18:50:30 +0000 | [diff] [blame] | 8679 | if (isKnownPredicateViaSplitting(Pred, LHS, RHS)) |
| 8680 | return true; |
| 8681 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 8682 | // Otherwise see what can be done with known constant ranges. |
Sanjoy Das | 401e631 | 2016-02-01 20:48:10 +0000 | [diff] [blame] | 8683 | return isKnownPredicateViaConstantRanges(Pred, LHS, RHS); |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 8684 | } |
| 8685 | |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8686 | bool ScalarEvolution::isMonotonicPredicate(const SCEVAddRecExpr *LHS, |
| 8687 | ICmpInst::Predicate Pred, |
| 8688 | bool &Increasing) { |
| 8689 | bool Result = isMonotonicPredicateImpl(LHS, Pred, Increasing); |
| 8690 | |
| 8691 | #ifndef NDEBUG |
 | 8692 |   // Verify an invariant: inverting the predicate should turn a monotonically 
 | 8693 |   // increasing predicate into a monotonically decreasing one, and vice versa. 
| 8694 | bool IncreasingSwapped; |
| 8695 | bool ResultSwapped = isMonotonicPredicateImpl( |
| 8696 | LHS, ICmpInst::getSwappedPredicate(Pred), IncreasingSwapped); |
| 8697 | |
| 8698 | assert(Result == ResultSwapped && "should be able to analyze both!"); |
| 8699 | if (ResultSwapped) |
| 8700 | assert(Increasing == !IncreasingSwapped && |
| 8701 | "monotonicity should flip as we flip the predicate"); |
| 8702 | #endif |
| 8703 | |
| 8704 | return Result; |
| 8705 | } |
| 8706 | |
| 8707 | bool ScalarEvolution::isMonotonicPredicateImpl(const SCEVAddRecExpr *LHS, |
| 8708 | ICmpInst::Predicate Pred, |
| 8709 | bool &Increasing) { |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8710 | |
| 8711 | // A zero step value for LHS means the induction variable is essentially a |
| 8712 | // loop invariant value. We don't really depend on the predicate actually |
| 8713 | // flipping from false to true (for increasing predicates, and the other way |
 | 8714 |   // around for decreasing predicates); all we care about is that *if* the 
 | 8715 |   // predicate changes, then it only changes from false to true. 
| 8716 | // |
| 8717 | // A zero step value in itself is not very useful, but there may be places |
| 8718 | // where SCEV can prove X >= 0 but not prove X > 0, so it is helpful to be |
| 8719 | // as general as possible. |
| 8720 | |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 8721 | switch (Pred) { |
| 8722 | default: |
| 8723 | return false; // Conservative answer |
| 8724 | |
| 8725 | case ICmpInst::ICMP_UGT: |
| 8726 | case ICmpInst::ICMP_UGE: |
| 8727 | case ICmpInst::ICMP_ULT: |
| 8728 | case ICmpInst::ICMP_ULE: |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 8729 | if (!LHS->hasNoUnsignedWrap()) |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 8730 | return false; |
| 8731 | |
| 8732 | Increasing = Pred == ICmpInst::ICMP_UGT || Pred == ICmpInst::ICMP_UGE; |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8733 | return true; |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 8734 | |
| 8735 | case ICmpInst::ICMP_SGT: |
| 8736 | case ICmpInst::ICMP_SGE: |
| 8737 | case ICmpInst::ICMP_SLT: |
| 8738 | case ICmpInst::ICMP_SLE: { |
Sanjoy Das | 76c48e0 | 2016-02-04 18:21:54 +0000 | [diff] [blame] | 8739 | if (!LHS->hasNoSignedWrap()) |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 8740 | return false; |
| 8741 | |
| 8742 | const SCEV *Step = LHS->getStepRecurrence(*this); |
| 8743 | |
| 8744 | if (isKnownNonNegative(Step)) { |
| 8745 | Increasing = Pred == ICmpInst::ICMP_SGT || Pred == ICmpInst::ICMP_SGE; |
| 8746 | return true; |
| 8747 | } |
| 8748 | |
| 8749 | if (isKnownNonPositive(Step)) { |
| 8750 | Increasing = Pred == ICmpInst::ICMP_SLT || Pred == ICmpInst::ICMP_SLE; |
| 8751 | return true; |
| 8752 | } |
| 8753 | |
| 8754 | return false; |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8755 | } |
| 8756 | |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8757 | } |
| 8758 | |
Sanjoy Das | 366acc1 | 2015-08-06 20:43:41 +0000 | [diff] [blame] | 8759 | llvm_unreachable("switch has default clause!"); |
Sanjoy Das | 5dab205 | 2015-07-27 21:42:49 +0000 | [diff] [blame] | 8760 | } |
| 8761 | |
| 8762 | bool ScalarEvolution::isLoopInvariantPredicate( |
| 8763 | ICmpInst::Predicate Pred, const SCEV *LHS, const SCEV *RHS, const Loop *L, |
| 8764 | ICmpInst::Predicate &InvariantPred, const SCEV *&InvariantLHS, |
| 8765 | const SCEV *&InvariantRHS) { |
| 8766 | |
| 8767 | // If there is a loop-invariant, force it into the RHS, otherwise bail out. |
| 8768 | if (!isLoopInvariant(RHS, L)) { |
| 8769 | if (!isLoopInvariant(LHS, L)) |
| 8770 | return false; |
| 8771 | |
| 8772 | std::swap(LHS, RHS); |
| 8773 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 8774 | } |
| 8775 | |
| 8776 | const SCEVAddRecExpr *ArLHS = dyn_cast<SCEVAddRecExpr>(LHS); |
| 8777 | if (!ArLHS || ArLHS->getLoop() != L) |
| 8778 | return false; |
| 8779 | |
| 8780 | bool Increasing; |
| 8781 | if (!isMonotonicPredicate(ArLHS, Pred, Increasing)) |
| 8782 | return false; |
| 8783 | |
| 8784 | // If the predicate "ArLHS `Pred` RHS" monotonically increases from false to |
| 8785 | // true as the loop iterates, and the backedge is control dependent on |
| 8786 | // "ArLHS `Pred` RHS" == true then we can reason as follows: |
| 8787 | // |
| 8788 | // * if the predicate was false in the first iteration then the predicate |
| 8789 | // is never evaluated again, since the loop exits without taking the |
| 8790 | // backedge. |
| 8791 | // * if the predicate was true in the first iteration then it will |
| 8792 | // continue to be true for all future iterations since it is |
| 8793 | // monotonically increasing. |
| 8794 | // |
| 8795 | // For both the above possibilities, we can replace the loop varying |
| 8796 | // predicate with its value on the first iteration of the loop (which is |
| 8797 | // loop invariant). |
| 8798 | // |
| 8799 | // A similar reasoning applies for a monotonically decreasing predicate, by |
| 8800 | // replacing true with false and false with true in the above two bullets. |
| 8801 | |
| 8802 | auto P = Increasing ? Pred : ICmpInst::getInversePredicate(Pred); |
| 8803 | |
| 8804 | if (!isLoopBackedgeGuardedByCond(L, P, LHS, RHS)) |
| 8805 | return false; |
| 8806 | |
| 8807 | InvariantPred = Pred; |
| 8808 | InvariantLHS = ArLHS->getStart(); |
| 8809 | InvariantRHS = RHS; |
| 8810 | return true; |
| 8811 | } |
| 8812 | |
Sanjoy Das | 401e631 | 2016-02-01 20:48:10 +0000 | [diff] [blame] | 8813 | bool ScalarEvolution::isKnownPredicateViaConstantRanges( |
| 8814 | ICmpInst::Predicate Pred, const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8815 | if (HasSameValue(LHS, RHS)) |
| 8816 | return ICmpInst::isTrueWhenEqual(Pred); |
| 8817 | |
Dan Gohman | 0759169 | 2010-04-11 22:16:48 +0000 | [diff] [blame] | 8818 | // This code is split out from isKnownPredicate because it is called from |
| 8819 | // within isLoopEntryGuardedByCond. |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8820 | |
Sanjoy Das | 4c7b6d7 | 2016-02-01 20:48:14 +0000 | [diff] [blame] | 8821 | auto CheckRanges = |
| 8822 | [&](const ConstantRange &RangeLHS, const ConstantRange &RangeRHS) { |
| 8823 | return ConstantRange::makeSatisfyingICmpRegion(Pred, RangeRHS) |
| 8824 | .contains(RangeLHS); |
| 8825 | }; |
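  // For instance (hypothetical ranges): if Pred is u< and the unsigned range
  // of LHS is [0, 10) while that of RHS is [10, 20), every possible LHS value
  // satisfies the predicate against every possible RHS value, so CheckRanges
  // returns true.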
| 8826 | |
| 8827 | // The check at the top of the function catches the case where the values are |
| 8828 | // known to be equal. |
| 8829 | if (Pred == CmpInst::ICMP_EQ) |
| 8830 | return false; |
| 8831 | |
| 8832 | if (Pred == CmpInst::ICMP_NE) |
| 8833 | return CheckRanges(getSignedRange(LHS), getSignedRange(RHS)) || |
| 8834 | CheckRanges(getUnsignedRange(LHS), getUnsignedRange(RHS)) || |
| 8835 | isKnownNonZero(getMinusSCEV(LHS, RHS)); |
| 8836 | |
| 8837 | if (CmpInst::isSigned(Pred)) |
| 8838 | return CheckRanges(getSignedRange(LHS), getSignedRange(RHS)); |
| 8839 | |
| 8840 | return CheckRanges(getUnsignedRange(LHS), getUnsignedRange(RHS)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8841 | } |
| 8842 | |
Sanjoy Das | c1a2977 | 2015-11-05 23:45:38 +0000 | [diff] [blame] | 8843 | bool ScalarEvolution::isKnownPredicateViaNoOverflow(ICmpInst::Predicate Pred, |
| 8844 | const SCEV *LHS, |
| 8845 | const SCEV *RHS) { |
Sanjoy Das | c1a2977 | 2015-11-05 23:45:38 +0000 | [diff] [blame] | 8846 | // Match Result to (X + Y)<ExpectedFlags> where Y is a constant integer. |
| 8847 | // Return Y via OutY. |
| 8848 | auto MatchBinaryAddToConst = |
| 8849 | [this](const SCEV *Result, const SCEV *X, APInt &OutY, |
| 8850 | SCEV::NoWrapFlags ExpectedFlags) { |
| 8851 | const SCEV *NonConstOp, *ConstOp; |
| 8852 | SCEV::NoWrapFlags FlagsPresent; |
| 8853 | |
| 8854 | if (!splitBinaryAdd(Result, ConstOp, NonConstOp, FlagsPresent) || |
| 8855 | !isa<SCEVConstant>(ConstOp) || NonConstOp != X) |
| 8856 | return false; |
| 8857 | |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 8858 | OutY = cast<SCEVConstant>(ConstOp)->getAPInt(); |
Sanjoy Das | c1a2977 | 2015-11-05 23:45:38 +0000 | [diff] [blame] | 8859 | return (FlagsPresent & ExpectedFlags) == ExpectedFlags; |
| 8860 | }; |
| 8861 | |
| 8862 | APInt C; |
| 8863 | |
| 8864 | switch (Pred) { |
| 8865 | default: |
| 8866 | break; |
| 8867 | |
| 8868 | case ICmpInst::ICMP_SGE: |
| 8869 | std::swap(LHS, RHS); |
Galina Kistanova | 8514dd5 | 2017-05-31 22:09:46 +0000 | [diff] [blame] | 8870 | LLVM_FALLTHROUGH; |
Sanjoy Das | c1a2977 | 2015-11-05 23:45:38 +0000 | [diff] [blame] | 8871 | case ICmpInst::ICMP_SLE: |
| 8872 | // X s<= (X + C)<nsw> if C >= 0 |
| 8873 | if (MatchBinaryAddToConst(RHS, LHS, C, SCEV::FlagNSW) && C.isNonNegative()) |
| 8874 | return true; |
| 8875 | |
| 8876 | // (X + C)<nsw> s<= X if C <= 0 |
| 8877 | if (MatchBinaryAddToConst(LHS, RHS, C, SCEV::FlagNSW) && |
| 8878 | !C.isStrictlyPositive()) |
| 8879 | return true; |
| 8880 | break; |
| 8881 | |
| 8882 | case ICmpInst::ICMP_SGT: |
| 8883 | std::swap(LHS, RHS); |
Galina Kistanova | 8514dd5 | 2017-05-31 22:09:46 +0000 | [diff] [blame] | 8884 | LLVM_FALLTHROUGH; |
Sanjoy Das | c1a2977 | 2015-11-05 23:45:38 +0000 | [diff] [blame] | 8885 | case ICmpInst::ICMP_SLT: |
| 8886 | // X s< (X + C)<nsw> if C > 0 |
| 8887 | if (MatchBinaryAddToConst(RHS, LHS, C, SCEV::FlagNSW) && |
| 8888 | C.isStrictlyPositive()) |
| 8889 | return true; |
| 8890 | |
| 8891 | // (X + C)<nsw> s< X if C < 0 |
| 8892 | if (MatchBinaryAddToConst(LHS, RHS, C, SCEV::FlagNSW) && C.isNegative()) |
| 8893 | return true; |
| 8894 | break; |
| 8895 | } |
| 8896 | |
| 8897 | return false; |
| 8898 | } |
| 8899 | |
Sanjoy Das | 7d910f2 | 2015-10-02 18:50:30 +0000 | [diff] [blame] | 8900 | bool ScalarEvolution::isKnownPredicateViaSplitting(ICmpInst::Predicate Pred, |
| 8901 | const SCEV *LHS, |
| 8902 | const SCEV *RHS) { |
Sanjoy Das | 10dffcb | 2015-10-08 03:46:00 +0000 | [diff] [blame] | 8903 | if (Pred != ICmpInst::ICMP_ULT || ProvingSplitPredicate) |
Sanjoy Das | 7d910f2 | 2015-10-02 18:50:30 +0000 | [diff] [blame] | 8904 | return false; |
| 8905 | |
 | 8906 |   // Allowing an arbitrary number of activations of isKnownPredicateViaSplitting on 
| 8907 | // the stack can result in exponential time complexity. |
| 8908 | SaveAndRestore<bool> Restore(ProvingSplitPredicate, true); |
| 8909 | |
| 8910 | // If L >= 0 then I `ult` L <=> I >= 0 && I `slt` L |
| 8911 | // |
| 8912 | // To prove L >= 0 we use isKnownNonNegative whereas to prove I >= 0 we use |
| 8913 | // isKnownPredicate. isKnownPredicate is more powerful, but also more |
| 8914 | // expensive; and using isKnownNonNegative(RHS) is sufficient for most of the |
| 8915 | // interesting cases seen in practice. We can consider "upgrading" L >= 0 to |
| 8916 | // use isKnownPredicate later if needed. |
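  //
  // For instance (hypothetical values): to show I u< L where L is known to be
  // signed-non-negative, it is enough to show 0 s<= I and I s< L, because a
  // non-negative L makes the signed and unsigned orderings agree on [0, L).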
Alexander Kornienko | 484e48e3 | 2015-11-05 21:07:12 +0000 | [diff] [blame] | 8917 | return isKnownNonNegative(RHS) && |
| 8918 | isKnownPredicate(CmpInst::ICMP_SGE, LHS, getZero(LHS->getType())) && |
| 8919 | isKnownPredicate(CmpInst::ICMP_SLT, LHS, RHS); |
Sanjoy Das | 7d910f2 | 2015-10-02 18:50:30 +0000 | [diff] [blame] | 8920 | } |
| 8921 | |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 8922 | bool ScalarEvolution::isImpliedViaGuard(BasicBlock *BB, |
| 8923 | ICmpInst::Predicate Pred, |
| 8924 | const SCEV *LHS, const SCEV *RHS) { |
| 8925 | // No need to even try if we know the module has no guards. |
| 8926 | if (!HasGuards) |
| 8927 | return false; |
| 8928 | |
| 8929 | return any_of(*BB, [&](Instruction &I) { |
| 8930 | using namespace llvm::PatternMatch; |
| 8931 | |
| 8932 | Value *Condition; |
| 8933 | return match(&I, m_Intrinsic<Intrinsic::experimental_guard>( |
| 8934 | m_Value(Condition))) && |
| 8935 | isImpliedCond(Pred, LHS, RHS, Condition, false); |
| 8936 | }); |
| 8937 | } |
| 8938 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8939 | /// isLoopBackedgeGuardedByCond - Test whether the backedge of the loop is |
| 8940 | /// protected by a conditional between LHS and RHS. This is used to |
 | 8941 | /// eliminate casts. 
| 8942 | bool |
| 8943 | ScalarEvolution::isLoopBackedgeGuardedByCond(const Loop *L, |
| 8944 | ICmpInst::Predicate Pred, |
| 8945 | const SCEV *LHS, const SCEV *RHS) { |
| 8946 | // Interpret a null as meaning no loop, where there is obviously no guard |
| 8947 | // (interprocedural conditions notwithstanding). |
| 8948 | if (!L) return true; |
| 8949 | |
Sanjoy Das | 401e631 | 2016-02-01 20:48:10 +0000 | [diff] [blame] | 8950 | if (isKnownPredicateViaConstantRanges(Pred, LHS, RHS)) |
| 8951 | return true; |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 8952 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8953 | BasicBlock *Latch = L->getLoopLatch(); |
| 8954 | if (!Latch) |
| 8955 | return false; |
| 8956 | |
| 8957 | BranchInst *LoopContinuePredicate = |
| 8958 | dyn_cast<BranchInst>(Latch->getTerminator()); |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 8959 | if (LoopContinuePredicate && LoopContinuePredicate->isConditional() && |
| 8960 | isImpliedCond(Pred, LHS, RHS, |
| 8961 | LoopContinuePredicate->getCondition(), |
| 8962 | LoopContinuePredicate->getSuccessor(0) != L->getHeader())) |
| 8963 | return true; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 8964 | |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 8965 | // We don't want more than one activation of the following loops on the stack |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 8966 | // -- that can lead to O(n!) time complexity. |
| 8967 | if (WalkingBEDominatingConds) |
| 8968 | return false; |
| 8969 | |
Sanjoy Das | 5d9a8cb | 2015-09-22 00:10:57 +0000 | [diff] [blame] | 8970 | SaveAndRestore<bool> ClearOnExit(WalkingBEDominatingConds, true); |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 8971 | |
Sanjoy Das | b174f9a | 2015-09-25 23:53:50 +0000 | [diff] [blame] | 8972 | // See if we can exploit a trip count to prove the predicate. |
| 8973 | const auto &BETakenInfo = getBackedgeTakenInfo(L); |
| 8974 | const SCEV *LatchBECount = BETakenInfo.getExact(Latch, this); |
| 8975 | if (LatchBECount != getCouldNotCompute()) { |
| 8976 | // We know that Latch branches back to the loop header exactly |
 | 8977 |     // LatchBECount times. This means the backedge condition at Latch is 
| 8978 | // equivalent to "{0,+,1} u< LatchBECount". |
| 8979 | Type *Ty = LatchBECount->getType(); |
| 8980 | auto NoWrapFlags = SCEV::NoWrapFlags(SCEV::FlagNUW | SCEV::FlagNW); |
| 8981 | const SCEV *LoopCounter = |
| 8982 | getAddRecExpr(getZero(Ty), getOne(Ty), L, NoWrapFlags); |
| 8983 | if (isImpliedCond(Pred, LHS, RHS, ICmpInst::ICMP_ULT, LoopCounter, |
| 8984 | LatchBECount)) |
| 8985 | return true; |
| 8986 | } |
| 8987 | |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 8988 | // Check conditions due to any @llvm.assume intrinsics. |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 8989 | for (auto &AssumeVH : AC.assumptions()) { |
| 8990 | if (!AssumeVH) |
| 8991 | continue; |
| 8992 | auto *CI = cast<CallInst>(AssumeVH); |
| 8993 | if (!DT.dominates(CI, Latch->getTerminator())) |
| 8994 | continue; |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 8995 | |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 8996 | if (isImpliedCond(Pred, LHS, RHS, CI->getArgOperand(0), false)) |
| 8997 | return true; |
| 8998 | } |
Piotr Padlewski | 0dde00d2 | 2015-09-09 20:47:30 +0000 | [diff] [blame] | 8999 | |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 9000 | // If the loop is not reachable from the entry block, we risk running into an |
| 9001 | // infinite loop as we walk up into the dom tree. These loops do not matter |
| 9002 | // anyway, so we just return a conservative answer when we see them. |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 9003 | if (!DT.isReachableFromEntry(L->getHeader())) |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 9004 | return false; |
| 9005 | |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 9006 | if (isImpliedViaGuard(Latch, Pred, LHS, RHS)) |
| 9007 | return true; |
| 9008 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 9009 | for (DomTreeNode *DTN = DT[Latch], *HeaderDTN = DT[L->getHeader()]; |
| 9010 | DTN != HeaderDTN; DTN = DTN->getIDom()) { |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 9011 | assert(DTN && "should reach the loop header before reaching the root!"); |
| 9012 | |
| 9013 | BasicBlock *BB = DTN->getBlock(); |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 9014 | if (isImpliedViaGuard(BB, Pred, LHS, RHS)) |
| 9015 | return true; |
| 9016 | |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 9017 | BasicBlock *PBB = BB->getSinglePredecessor(); |
| 9018 | if (!PBB) |
| 9019 | continue; |
| 9020 | |
| 9021 | BranchInst *ContinuePredicate = dyn_cast<BranchInst>(PBB->getTerminator()); |
| 9022 | if (!ContinuePredicate || !ContinuePredicate->isConditional()) |
| 9023 | continue; |
| 9024 | |
| 9025 | Value *Condition = ContinuePredicate->getCondition(); |
| 9026 | |
| 9027 | // If we have an edge `E` within the loop body that dominates the only |
| 9028 | // latch, the condition guarding `E` also guards the backedge. This |
| 9029 | // reasoning works only for loops with a single latch. |
| 9030 | |
| 9031 | BasicBlockEdge DominatingEdge(PBB, BB); |
| 9032 | if (DominatingEdge.isSingleEdge()) { |
| 9033 | // We're constructively (and conservatively) enumerating edges within the |
| 9034 | // loop body that dominate the latch. The dominator tree better agree |
| 9035 | // with us on this: |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 9036 | assert(DT.dominates(DominatingEdge, Latch) && "should be!"); |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 9037 | |
| 9038 | if (isImpliedCond(Pred, LHS, RHS, Condition, |
| 9039 | BB != ContinuePredicate->getSuccessor(0))) |
| 9040 | return true; |
| 9041 | } |
| 9042 | } |
| 9043 | |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 9044 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9045 | } |
| 9046 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9047 | bool |
Dan Gohman | b50349a | 2010-04-11 19:27:13 +0000 | [diff] [blame] | 9048 | ScalarEvolution::isLoopEntryGuardedByCond(const Loop *L, |
| 9049 | ICmpInst::Predicate Pred, |
| 9050 | const SCEV *LHS, const SCEV *RHS) { |
Dan Gohman | 9cf09f8 | 2009-05-18 16:03:58 +0000 | [diff] [blame] | 9051 | // Interpret a null as meaning no loop, where there is obviously no guard |
| 9052 | // (interprocedural conditions notwithstanding). |
| 9053 | if (!L) return false; |
| 9054 | |
Sanjoy Das | 401e631 | 2016-02-01 20:48:10 +0000 | [diff] [blame] | 9055 | if (isKnownPredicateViaConstantRanges(Pred, LHS, RHS)) |
| 9056 | return true; |
Sanjoy Das | 1f05c51 | 2014-10-10 21:22:34 +0000 | [diff] [blame] | 9057 | |
Dan Gohman | 8c77f1a | 2009-05-18 15:36:09 +0000 | [diff] [blame] | 9058 | // Starting at the loop predecessor, climb up the predecessor chain, as long |
| 9059 | // as there are predecessors that can be found that have unique successors |
Dan Gohman | f9081a2 | 2008-09-15 22:18:04 +0000 | [diff] [blame] | 9060 | // leading to the original header. |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 9061 | for (std::pair<BasicBlock *, BasicBlock *> |
Dan Gohman | 75c6b0b | 2010-06-22 23:43:28 +0000 | [diff] [blame] | 9062 | Pair(L->getLoopPredecessor(), L->getHeader()); |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 9063 | Pair.first; |
| 9064 | Pair = getPredecessorWithUniqueSuccessorForBB(Pair.first)) { |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 9065 | |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 9066 | if (isImpliedViaGuard(Pair.first, Pred, LHS, RHS)) |
| 9067 | return true; |
| 9068 | |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 9069 | BranchInst *LoopEntryPredicate = |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 9070 | dyn_cast<BranchInst>(Pair.first->getTerminator()); |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 9071 | if (!LoopEntryPredicate || |
| 9072 | LoopEntryPredicate->isUnconditional()) |
| 9073 | continue; |
| 9074 | |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9075 | if (isImpliedCond(Pred, LHS, RHS, |
| 9076 | LoopEntryPredicate->getCondition(), |
Dan Gohman | 4e3c113 | 2010-04-15 16:19:08 +0000 | [diff] [blame] | 9077 | LoopEntryPredicate->getSuccessor(0) != Pair.second)) |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 9078 | return true; |
Nick Lewycky | b5688cc | 2008-07-12 07:41:32 +0000 | [diff] [blame] | 9079 | } |
| 9080 | |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 9081 | // Check conditions due to any @llvm.assume intrinsics. |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 9082 | for (auto &AssumeVH : AC.assumptions()) { |
| 9083 | if (!AssumeVH) |
| 9084 | continue; |
| 9085 | auto *CI = cast<CallInst>(AssumeVH); |
| 9086 | if (!DT.dominates(CI, L->getHeader())) |
| 9087 | continue; |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 9088 | |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 9089 | if (isImpliedCond(Pred, LHS, RHS, CI->getArgOperand(0), false)) |
| 9090 | return true; |
| 9091 | } |
Hal Finkel | cebf0cc | 2014-09-07 21:37:59 +0000 | [diff] [blame] | 9092 | |
Dan Gohman | 2a62fd9 | 2008-08-12 20:17:31 +0000 | [diff] [blame] | 9093 | return false; |
Nick Lewycky | b5688cc | 2008-07-12 07:41:32 +0000 | [diff] [blame] | 9094 | } |
| 9095 | |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9096 | bool ScalarEvolution::isImpliedCond(ICmpInst::Predicate Pred, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9097 | const SCEV *LHS, const SCEV *RHS, |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9098 | Value *FoundCondValue, |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9099 | bool Inverse) { |
Sanjoy Das | c46bceb | 2016-09-27 18:01:42 +0000 | [diff] [blame] | 9100 | if (!PendingLoopPredicates.insert(FoundCondValue).second) |
Andrew Trick | 7fa4e0f | 2012-05-19 00:48:25 +0000 | [diff] [blame] | 9101 | return false; |
| 9102 | |
Sanjoy Das | c46bceb | 2016-09-27 18:01:42 +0000 | [diff] [blame] | 9103 | auto ClearOnExit = |
| 9104 | make_scope_exit([&]() { PendingLoopPredicates.erase(FoundCondValue); }); |
| 9105 | |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 9106 | // Recursively handle And and Or conditions. |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9107 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(FoundCondValue)) { |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 9108 | if (BO->getOpcode() == Instruction::And) { |
| 9109 | if (!Inverse) |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9110 | return isImpliedCond(Pred, LHS, RHS, BO->getOperand(0), Inverse) || |
| 9111 | isImpliedCond(Pred, LHS, RHS, BO->getOperand(1), Inverse); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 9112 | } else if (BO->getOpcode() == Instruction::Or) { |
| 9113 | if (Inverse) |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9114 | return isImpliedCond(Pred, LHS, RHS, BO->getOperand(0), Inverse) || |
| 9115 | isImpliedCond(Pred, LHS, RHS, BO->getOperand(1), Inverse); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 9116 | } |
| 9117 | } |
| 9118 | |
Dan Gohman | e18c2d6 | 2010-08-10 23:46:30 +0000 | [diff] [blame] | 9119 | ICmpInst *ICI = dyn_cast<ICmpInst>(FoundCondValue); |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 9120 | if (!ICI) return false; |
| 9121 | |
Andrew Trick | fa59403 | 2012-11-29 18:35:13 +0000 | [diff] [blame] | 9122 | // We have now found a conditional branch that dominates the loop or controls
| 9123 | // the loop latch. Check to see if it is the comparison we are looking for.
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9124 | ICmpInst::Predicate FoundPred; |
| 9125 | if (Inverse) |
| 9126 | FoundPred = ICI->getInversePredicate(); |
| 9127 | else |
| 9128 | FoundPred = ICI->getPredicate(); |
| 9129 | |
| 9130 | const SCEV *FoundLHS = getSCEV(ICI->getOperand(0)); |
| 9131 | const SCEV *FoundRHS = getSCEV(ICI->getOperand(1)); |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9132 | |
Sanjoy Das | df1635d | 2015-09-25 19:59:52 +0000 | [diff] [blame] | 9133 | return isImpliedCond(Pred, LHS, RHS, FoundPred, FoundLHS, FoundRHS); |
| 9134 | } |
| 9135 | |
| 9136 | bool ScalarEvolution::isImpliedCond(ICmpInst::Predicate Pred, const SCEV *LHS, |
| 9137 | const SCEV *RHS, |
| 9138 | ICmpInst::Predicate FoundPred, |
| 9139 | const SCEV *FoundLHS, |
| 9140 | const SCEV *FoundRHS) { |
Sanjoy Das | 1459883 | 2015-03-26 17:28:26 +0000 | [diff] [blame] | 9141 | // Balance the types. |
| 9142 | if (getTypeSizeInBits(LHS->getType()) < |
| 9143 | getTypeSizeInBits(FoundLHS->getType())) { |
| 9144 | if (CmpInst::isSigned(Pred)) { |
| 9145 | LHS = getSignExtendExpr(LHS, FoundLHS->getType()); |
| 9146 | RHS = getSignExtendExpr(RHS, FoundLHS->getType()); |
| 9147 | } else { |
| 9148 | LHS = getZeroExtendExpr(LHS, FoundLHS->getType()); |
| 9149 | RHS = getZeroExtendExpr(RHS, FoundLHS->getType()); |
| 9150 | } |
| 9151 | } else if (getTypeSizeInBits(LHS->getType()) > |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9152 | getTypeSizeInBits(FoundLHS->getType())) { |
Stepan Dyatkovskiy | 431993b | 2014-01-09 12:26:12 +0000 | [diff] [blame] | 9153 | if (CmpInst::isSigned(FoundPred)) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9154 | FoundLHS = getSignExtendExpr(FoundLHS, LHS->getType()); |
| 9155 | FoundRHS = getSignExtendExpr(FoundRHS, LHS->getType()); |
| 9156 | } else { |
| 9157 | FoundLHS = getZeroExtendExpr(FoundLHS, LHS->getType()); |
| 9158 | FoundRHS = getZeroExtendExpr(FoundRHS, LHS->getType()); |
| 9159 | } |
| 9160 | } |
| 9161 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9162 | // Canonicalize the query to match the way instcombine will have |
| 9163 | // canonicalized the comparison. |
Dan Gohman | 3673aa1 | 2010-04-24 01:34:53 +0000 | [diff] [blame] | 9164 | if (SimplifyICmpOperands(Pred, LHS, RHS)) |
| 9165 | if (LHS == RHS) |
Dan Gohman | b5025c7 | 2010-05-03 18:00:24 +0000 | [diff] [blame] | 9166 | return CmpInst::isTrueWhenEqual(Pred); |
Benjamin Kramer | ba11a98 | 2012-11-29 19:07:57 +0000 | [diff] [blame] | 9167 | if (SimplifyICmpOperands(FoundPred, FoundLHS, FoundRHS)) |
| 9168 | if (FoundLHS == FoundRHS) |
| 9169 | return CmpInst::isFalseWhenEqual(FoundPred); |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9170 | |
| 9171 | // Check to see if we can make the LHS or RHS match. |
| 9172 | if (LHS == FoundRHS || RHS == FoundLHS) { |
| 9173 | if (isa<SCEVConstant>(RHS)) { |
| 9174 | std::swap(FoundLHS, FoundRHS); |
| 9175 | FoundPred = ICmpInst::getSwappedPredicate(FoundPred); |
| 9176 | } else { |
| 9177 | std::swap(LHS, RHS); |
| 9178 | Pred = ICmpInst::getSwappedPredicate(Pred); |
| 9179 | } |
| 9180 | } |
| 9181 | |
| 9182 | // Check whether the found predicate is the same as the desired predicate. |
| 9183 | if (FoundPred == Pred) |
| 9184 | return isImpliedCondOperands(Pred, LHS, RHS, FoundLHS, FoundRHS); |
| 9185 | |
| 9186 | // Check whether swapping the found predicate makes it the same as the |
| 9187 | // desired predicate. |
| 9188 | if (ICmpInst::getSwappedPredicate(FoundPred) == Pred) { |
| 9189 | if (isa<SCEVConstant>(RHS)) |
| 9190 | return isImpliedCondOperands(Pred, LHS, RHS, FoundRHS, FoundLHS); |
| 9191 | else |
| 9192 | return isImpliedCondOperands(ICmpInst::getSwappedPredicate(Pred), |
| 9193 | RHS, LHS, FoundLHS, FoundRHS); |
| 9194 | } |
| 9195 | |
Sanjoy Das | 6e78b17 | 2015-10-22 19:57:34 +0000 | [diff] [blame] | 9196 | // An unsigned comparison is the same as a signed comparison when both
| 9197 | // operands are non-negative.
| 9198 | if (CmpInst::isUnsigned(FoundPred) && |
| 9199 | CmpInst::getSignedPredicate(FoundPred) == Pred && |
| 9200 | isKnownNonNegative(FoundLHS) && isKnownNonNegative(FoundRHS)) |
| 9201 | return isImpliedCondOperands(Pred, LHS, RHS, FoundLHS, FoundRHS); |
| 9202 | |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 9203 | // Check if we can make progress by sharpening ranges. |
| 9204 | if (FoundPred == ICmpInst::ICMP_NE && |
| 9205 | (isa<SCEVConstant>(FoundLHS) || isa<SCEVConstant>(FoundRHS))) { |
| 9206 | |
| 9207 | const SCEVConstant *C = nullptr; |
| 9208 | const SCEV *V = nullptr; |
| 9209 | |
| 9210 | if (isa<SCEVConstant>(FoundLHS)) { |
| 9211 | C = cast<SCEVConstant>(FoundLHS); |
| 9212 | V = FoundRHS; |
| 9213 | } else { |
| 9214 | C = cast<SCEVConstant>(FoundRHS); |
| 9215 | V = FoundLHS; |
| 9216 | } |
| 9217 | |
| 9218 | // The guarding predicate tells us that C != V. If the known range |
| 9219 | // of V is [C, t), we can sharpen the range to [C + 1, t). The |
| 9220 | // range we consider has to correspond to the same signedness as the
| 9221 | // predicate we're interested in folding.
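| | // For illustration: if V is known to be in the unsigned range [5, 100) and
| | // the guard tells us V != 5, then V actually lies in [6, 100), i.e. V u>= 6.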
| 9222 | |
| 9223 | APInt Min = ICmpInst::isSigned(Pred) ? |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9224 | getSignedRangeMin(V) : getUnsignedRangeMin(V); |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 9225 | |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 9226 | if (Min == C->getAPInt()) { |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 9227 | // Given (V >= Min && V != Min) we conclude V >= (Min + 1). |
| 9228 | // This is true even if (Min + 1) wraps around -- in case of |
| 9229 | // wraparound, (Min + 1) < Min, so (V >= Min => V >= (Min + 1)). |
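| | // E.g. for unsigned i8 with Min == 255: V >= 255 forces V == 255, and
| | // (Min + 1) wraps to 0, so V >= 0 holds trivially.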
| 9230 | |
| 9231 | APInt SharperMin = Min + 1; |
| 9232 | |
| 9233 | switch (Pred) { |
| 9234 | case ICmpInst::ICMP_SGE: |
| 9235 | case ICmpInst::ICMP_UGE: |
| 9236 | // We know V `Pred` SharperMin. If this implies LHS `Pred` |
| 9237 | // RHS, we're done. |
| 9238 | if (isImpliedCondOperands(Pred, LHS, RHS, V, |
| 9239 | getConstant(SharperMin))) |
| 9240 | return true; |
Galina Kistanova | 8514dd5 | 2017-05-31 22:09:46 +0000 | [diff] [blame] | 9241 | LLVM_FALLTHROUGH; |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 9242 | |
| 9243 | case ICmpInst::ICMP_SGT: |
| 9244 | case ICmpInst::ICMP_UGT: |
| 9245 | // We know from the range information that (V `Pred` Min || |
| 9246 | // V == Min). We know from the guarding condition that !(V |
| 9247 | // == Min). This gives us |
| 9248 | // |
| 9249 | // (V `Pred` Min || V == Min) && !(V == Min)
| 9250 | // => V `Pred` Min |
| 9251 | // |
| 9252 | // If V `Pred` Min implies LHS `Pred` RHS, we're done. |
| 9253 | |
| 9254 | if (isImpliedCondOperands(Pred, LHS, RHS, V, getConstant(Min))) |
| 9255 | return true; |
Galina Kistanova | 8514dd5 | 2017-05-31 22:09:46 +0000 | [diff] [blame] | 9256 | LLVM_FALLTHROUGH; |
Sanjoy Das | c5676df | 2014-11-13 00:00:58 +0000 | [diff] [blame] | 9257 | |
| 9258 | default: |
| 9259 | // No change |
| 9260 | break; |
| 9261 | } |
| 9262 | } |
| 9263 | } |
| 9264 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9265 | // Check whether the actual condition is beyond sufficient. |
| 9266 | if (FoundPred == ICmpInst::ICMP_EQ) |
| 9267 | if (ICmpInst::isTrueWhenEqual(Pred)) |
| 9268 | if (isImpliedCondOperands(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 9269 | return true; |
| 9270 | if (Pred == ICmpInst::ICMP_NE) |
| 9271 | if (!ICmpInst::isTrueWhenEqual(FoundPred)) |
| 9272 | if (isImpliedCondOperands(FoundPred, LHS, RHS, FoundLHS, FoundRHS)) |
| 9273 | return true; |
| 9274 | |
| 9275 | // Otherwise assume the worst. |
| 9276 | return false; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9277 | } |
| 9278 | |
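| | // Try to split a two-operand add expression Expr into L + R and report its
| | // no-wrap flags; returns false for anything that is not a binary SCEVAddExpr.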
Sanjoy Das | 1ed6910 | 2015-10-13 02:53:27 +0000 | [diff] [blame] | 9279 | bool ScalarEvolution::splitBinaryAdd(const SCEV *Expr, |
| 9280 | const SCEV *&L, const SCEV *&R, |
| 9281 | SCEV::NoWrapFlags &Flags) { |
| 9282 | const auto *AE = dyn_cast<SCEVAddExpr>(Expr); |
| 9283 | if (!AE || AE->getNumOperands() != 2) |
| 9284 | return false; |
| 9285 | |
| 9286 | L = AE->getOperand(0); |
| 9287 | R = AE->getOperand(1); |
| 9288 | Flags = AE->getNoWrapFlags(); |
| 9289 | return true; |
| 9290 | } |
| 9291 | |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9292 | Optional<APInt> ScalarEvolution::computeConstantDifference(const SCEV *More, |
| 9293 | const SCEV *Less) { |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9294 | // We avoid subtracting expressions here because this function is usually |
| 9295 | // fairly deep in the call stack (i.e. is called many times). |
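| | // The result, when defined, is the constant value of More - Less. For
| | // example, {42,+,3} and {40,+,3} over the same loop give 2, as do
| | // (2 + %x) and %x; None is returned when no constant difference can be
| | // found syntactically.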
| 9296 | |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9297 | if (isa<SCEVAddRecExpr>(Less) && isa<SCEVAddRecExpr>(More)) { |
| 9298 | const auto *LAR = cast<SCEVAddRecExpr>(Less); |
| 9299 | const auto *MAR = cast<SCEVAddRecExpr>(More); |
| 9300 | |
| 9301 | if (LAR->getLoop() != MAR->getLoop()) |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9302 | return None; |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9303 | |
| 9304 | // We look at affine expressions only, not for correctness but to keep
| 9305 | // getStepRecurrence cheap. |
| 9306 | if (!LAR->isAffine() || !MAR->isAffine()) |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9307 | return None; |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9308 | |
Sanjoy Das | 1ed6910 | 2015-10-13 02:53:27 +0000 | [diff] [blame] | 9309 | if (LAR->getStepRecurrence(*this) != MAR->getStepRecurrence(*this)) |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9310 | return None; |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9311 | |
| 9312 | Less = LAR->getStart(); |
| 9313 | More = MAR->getStart(); |
| 9314 | |
| 9315 | // fall through |
| 9316 | } |
| 9317 | |
| 9318 | if (isa<SCEVConstant>(Less) && isa<SCEVConstant>(More)) { |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 9319 | const auto &M = cast<SCEVConstant>(More)->getAPInt(); |
| 9320 | const auto &L = cast<SCEVConstant>(Less)->getAPInt(); |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9321 | return M - L; |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9322 | } |
| 9323 | |
| 9324 | const SCEV *L, *R; |
Sanjoy Das | 1ed6910 | 2015-10-13 02:53:27 +0000 | [diff] [blame] | 9325 | SCEV::NoWrapFlags Flags; |
| 9326 | if (splitBinaryAdd(Less, L, R, Flags)) |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9327 | if (const auto *LC = dyn_cast<SCEVConstant>(L)) |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9328 | if (R == More) |
| 9329 | return -(LC->getAPInt()); |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9330 | |
Sanjoy Das | 1ed6910 | 2015-10-13 02:53:27 +0000 | [diff] [blame] | 9331 | if (splitBinaryAdd(More, L, R, Flags)) |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9332 | if (const auto *LC = dyn_cast<SCEVConstant>(L)) |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9333 | if (R == Less) |
| 9334 | return LC->getAPInt(); |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9335 | |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9336 | return None; |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9337 | } |
| 9338 | |
| 9339 | bool ScalarEvolution::isImpliedCondOperandsViaNoOverflow( |
| 9340 | ICmpInst::Predicate Pred, const SCEV *LHS, const SCEV *RHS, |
| 9341 | const SCEV *FoundLHS, const SCEV *FoundRHS) { |
| 9342 | if (Pred != CmpInst::ICMP_SLT && Pred != CmpInst::ICMP_ULT) |
| 9343 | return false; |
| 9344 | |
| 9345 | const auto *AddRecLHS = dyn_cast<SCEVAddRecExpr>(LHS); |
| 9346 | if (!AddRecLHS) |
| 9347 | return false; |
| 9348 | |
| 9349 | const auto *AddRecFoundLHS = dyn_cast<SCEVAddRecExpr>(FoundLHS); |
| 9350 | if (!AddRecFoundLHS) |
| 9351 | return false; |
| 9352 | |
| 9353 | // We'd like to let SCEV reason about control dependencies, so we constrain |
| 9354 | // both inequalities to be about add recurrences on the same loop. This
| 9355 | // way we can use isLoopEntryGuardedByCond later. |
| 9356 | |
| 9357 | const Loop *L = AddRecFoundLHS->getLoop(); |
| 9358 | if (L != AddRecLHS->getLoop()) |
| 9359 | return false; |
| 9360 | |
| 9361 | // FoundLHS u< FoundRHS u< -C => (FoundLHS + C) u< (FoundRHS + C) ... (1) |
| 9362 | // |
| 9363 | // FoundLHS s< FoundRHS s< INT_MIN - C => (FoundLHS + C) s< (FoundRHS + C) |
| 9364 | // ... (2) |
| 9365 | // |
| 9366 | // Informal proof for (2), assuming (1) [*]: |
| 9367 | // |
| 9368 | // We'll also assume (A s< B) <=> ((A + INT_MIN) u< (B + INT_MIN)) ... (3)[**] |
| 9369 | // |
| 9370 | // Then |
| 9371 | // |
| 9372 | // FoundLHS s< FoundRHS s< INT_MIN - C |
| 9373 | // <=> (FoundLHS + INT_MIN) u< (FoundRHS + INT_MIN) u< -C [ using (3) ] |
| 9374 | // <=> (FoundLHS + INT_MIN + C) u< (FoundRHS + INT_MIN + C) [ using (1) ] |
| 9375 | // <=> (FoundLHS + INT_MIN + C + INT_MIN) s< |
| 9376 | // (FoundRHS + INT_MIN + C + INT_MIN) [ using (3) ] |
| 9377 | // <=> FoundLHS + C s< FoundRHS + C |
| 9378 | // |
| 9379 | // [*]: (1) can be proved by ruling out overflow. |
| 9380 | // |
| 9381 | // [**]: This can be proved by analyzing all the four possibilities: |
| 9382 | // (A s< 0, B s< 0), (A s< 0, B s>= 0), (A s>= 0, B s< 0) and |
| 9383 | // (A s>= 0, B s>= 0). |
| 9384 | // |
| 9385 | // Note: |
| 9386 | // Despite (2), "FoundRHS s< INT_MIN - C" does not mean that "FoundRHS + C" |
| 9387 | // will not sign underflow. For instance, say FoundLHS = (i8 -128), FoundRHS |
| 9388 | // = (i8 -127) and C = (i8 -100). Then INT_MIN - C = (i8 -28), and FoundRHS |
| 9389 | // s< (INT_MIN - C). Lack of sign overflow / underflow in "FoundRHS + C" is |
| 9390 | // neither necessary nor sufficient to prove "(FoundLHS + C) s< (FoundRHS + |
| 9391 | // C)". |
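| | //
| | // A concrete instance of (1) in i8: with C = 5 (so -C == 251), from
| | // 10 u< 20 u< 251 we get (10 + 5) u< (20 + 5), i.e. 15 u< 25; the bound
| | // FoundRHS u< -C is exactly what rules out unsigned wrap in the additions.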
| 9392 | |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9393 | Optional<APInt> LDiff = computeConstantDifference(LHS, FoundLHS); |
| 9394 | Optional<APInt> RDiff = computeConstantDifference(RHS, FoundRHS); |
| 9395 | if (!LDiff || !RDiff || *LDiff != *RDiff) |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9396 | return false; |
| 9397 | |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9398 | if (LDiff->isMinValue()) |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9399 | return true; |
| 9400 | |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9401 | APInt FoundRHSLimit; |
| 9402 | |
| 9403 | if (Pred == CmpInst::ICMP_ULT) { |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9404 | FoundRHSLimit = -(*RDiff); |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9405 | } else { |
| 9406 | assert(Pred == CmpInst::ICMP_SLT && "Checked above!"); |
Sanjoy Das | 0b1af85 | 2016-07-23 00:28:56 +0000 | [diff] [blame] | 9407 | FoundRHSLimit =
| | APInt::getSignedMinValue(getTypeSizeInBits(RHS->getType())) - *RDiff;
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9408 | } |
| 9409 | |
| 9410 | // Try to prove (1) or (2), as needed. |
| 9411 | return isLoopEntryGuardedByCond(L, Pred, FoundRHS, |
| 9412 | getConstant(FoundRHSLimit)); |
| 9413 | } |
| 9414 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9415 | bool ScalarEvolution::isImpliedCondOperands(ICmpInst::Predicate Pred, |
| 9416 | const SCEV *LHS, const SCEV *RHS, |
| 9417 | const SCEV *FoundLHS, |
| 9418 | const SCEV *FoundRHS) { |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9419 | if (isImpliedCondOperandsViaRanges(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 9420 | return true; |
| 9421 | |
Sanjoy Das | 96709c4 | 2015-09-25 23:53:45 +0000 | [diff] [blame] | 9422 | if (isImpliedCondOperandsViaNoOverflow(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 9423 | return true; |
| 9424 | |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9425 | return isImpliedCondOperandsHelper(Pred, LHS, RHS, |
| 9426 | FoundLHS, FoundRHS) || |
| 9427 | // ~x < ~y --> x > y |
| 9428 | isImpliedCondOperandsHelper(Pred, LHS, RHS, |
| 9429 | getNotSCEV(FoundRHS), |
| 9430 | getNotSCEV(FoundLHS)); |
| 9431 | } |
| 9432 | |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9433 | /// If Expr computes ~A, return A, else return nullptr.
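| | /// In SCEV form ~A is (-1 + (-1 * A)), since ~A == -1 - A; that is the shape
| | /// matched below.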
| 9434 | static const SCEV *MatchNotExpr(const SCEV *Expr) { |
| 9435 | const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Expr); |
Sanjoy Das | 16e7ff1 | 2015-10-13 23:28:31 +0000 | [diff] [blame] | 9436 | if (!Add || Add->getNumOperands() != 2 || |
| 9437 | !Add->getOperand(0)->isAllOnesValue()) |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9438 | return nullptr; |
| 9439 | |
| 9440 | const SCEVMulExpr *AddRHS = dyn_cast<SCEVMulExpr>(Add->getOperand(1)); |
Sanjoy Das | 16e7ff1 | 2015-10-13 23:28:31 +0000 | [diff] [blame] | 9441 | if (!AddRHS || AddRHS->getNumOperands() != 2 || |
| 9442 | !AddRHS->getOperand(0)->isAllOnesValue()) |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9443 | return nullptr; |
| 9444 | |
| 9445 | return AddRHS->getOperand(1); |
| 9446 | } |
| 9447 | |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9448 | /// Is MaybeMaxExpr an SMax or UMax of Candidate and some other values? |
| 9449 | template<typename MaxExprType> |
| 9450 | static bool IsMaxConsistingOf(const SCEV *MaybeMaxExpr, |
| 9451 | const SCEV *Candidate) { |
| 9452 | const MaxExprType *MaxExpr = dyn_cast<MaxExprType>(MaybeMaxExpr); |
| 9453 | if (!MaxExpr) return false; |
| 9454 | |
Sanjoy Das | 347d272 | 2015-12-01 07:49:27 +0000 | [diff] [blame] | 9455 | return find(MaxExpr->operands(), Candidate) != MaxExpr->op_end(); |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9456 | } |
| 9457 | |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9458 | /// Is MaybeMinExpr an SMin or UMin of Candidate and some other values? |
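| | /// SCEV has no dedicated min expression; min(A, B) is encoded as
| | /// ~max(~A, ~B), so we strip the outer "not" and look for ~Candidate among
| | /// the max's operands.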
| 9459 | template<typename MaxExprType> |
| 9460 | static bool IsMinConsistingOf(ScalarEvolution &SE, |
| 9461 | const SCEV *MaybeMinExpr, |
| 9462 | const SCEV *Candidate) { |
| 9463 | const SCEV *MaybeMaxExpr = MatchNotExpr(MaybeMinExpr); |
| 9464 | if (!MaybeMaxExpr) |
| 9465 | return false; |
| 9466 | |
| 9467 | return IsMaxConsistingOf<MaxExprType>(MaybeMaxExpr, SE.getNotSCEV(Candidate)); |
| 9468 | } |
| 9469 | |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 9470 | static bool IsKnownPredicateViaAddRecStart(ScalarEvolution &SE, |
| 9471 | ICmpInst::Predicate Pred, |
| 9472 | const SCEV *LHS, const SCEV *RHS) { |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 9473 | // If both sides are affine addrecs for the same loop, with equal |
| 9474 | // steps, and we know the recurrences don't wrap, then we only |
| 9475 | // need to check the predicate on the starting values. |
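| | // For example, {x,+,s}<nsw> s< {y,+,s}<nsw> holds on every iteration
| | // whenever x s< y, because both sides advance by the same amount each
| | // iteration and neither can wrap.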
| 9476 | |
| 9477 | if (!ICmpInst::isRelational(Pred)) |
| 9478 | return false; |
| 9479 | |
| 9480 | const SCEVAddRecExpr *LAR = dyn_cast<SCEVAddRecExpr>(LHS); |
| 9481 | if (!LAR) |
| 9482 | return false; |
| 9483 | const SCEVAddRecExpr *RAR = dyn_cast<SCEVAddRecExpr>(RHS); |
| 9484 | if (!RAR) |
| 9485 | return false; |
| 9486 | if (LAR->getLoop() != RAR->getLoop()) |
| 9487 | return false; |
| 9488 | if (!LAR->isAffine() || !RAR->isAffine()) |
| 9489 | return false; |
| 9490 | |
| 9491 | if (LAR->getStepRecurrence(SE) != RAR->getStepRecurrence(SE)) |
| 9492 | return false; |
| 9493 | |
Hal Finkel | ff08a2e | 2015-08-19 17:26:07 +0000 | [diff] [blame] | 9494 | SCEV::NoWrapFlags NW = ICmpInst::isSigned(Pred) ? |
| 9495 | SCEV::FlagNSW : SCEV::FlagNUW; |
| 9496 | if (!LAR->getNoWrapFlags(NW) || !RAR->getNoWrapFlags(NW)) |
Hal Finkel | a8d205f | 2015-08-19 01:51:51 +0000 | [diff] [blame] | 9497 | return false; |
| 9498 | |
| 9499 | return SE.isKnownPredicate(Pred, LAR->getStart(), RAR->getStart()); |
| 9500 | } |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9501 | |
| 9502 | /// Is LHS `Pred` RHS true on the virtue of LHS or RHS being a Min or Max |
| 9503 | /// expression? |
| 9504 | static bool IsKnownPredicateViaMinOrMax(ScalarEvolution &SE, |
| 9505 | ICmpInst::Predicate Pred, |
| 9506 | const SCEV *LHS, const SCEV *RHS) { |
| 9507 | switch (Pred) { |
| 9508 | default: |
| 9509 | return false; |
| 9510 | |
| 9511 | case ICmpInst::ICMP_SGE: |
| 9512 | std::swap(LHS, RHS); |
Justin Bogner | cd1d5aa | 2016-08-17 20:30:52 +0000 | [diff] [blame] | 9513 | LLVM_FALLTHROUGH; |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9514 | case ICmpInst::ICMP_SLE: |
| 9515 | return |
| 9516 | // min(A, ...) <= A |
| 9517 | IsMinConsistingOf<SCEVSMaxExpr>(SE, LHS, RHS) || |
| 9518 | // A <= max(A, ...) |
| 9519 | IsMaxConsistingOf<SCEVSMaxExpr>(RHS, LHS); |
| 9520 | |
| 9521 | case ICmpInst::ICMP_UGE: |
| 9522 | std::swap(LHS, RHS); |
Justin Bogner | cd1d5aa | 2016-08-17 20:30:52 +0000 | [diff] [blame] | 9523 | LLVM_FALLTHROUGH; |
Sanjoy Das | 4555b6d | 2014-12-15 22:50:15 +0000 | [diff] [blame] | 9524 | case ICmpInst::ICMP_ULE: |
| 9525 | return |
| 9526 | // min(A, ...) <= A |
| 9527 | IsMinConsistingOf<SCEVUMaxExpr>(SE, LHS, RHS) || |
| 9528 | // A <= max(A, ...) |
| 9529 | IsMaxConsistingOf<SCEVUMaxExpr>(RHS, LHS); |
| 9530 | } |
| 9531 | |
| 9532 | llvm_unreachable("covered switch fell through?!"); |
| 9533 | } |
| 9534 | |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9535 | bool ScalarEvolution::isImpliedViaOperations(ICmpInst::Predicate Pred, |
| 9536 | const SCEV *LHS, const SCEV *RHS, |
| 9537 | const SCEV *FoundLHS, |
| 9538 | const SCEV *FoundRHS, |
| 9539 | unsigned Depth) { |
| 9540 | assert(getTypeSizeInBits(LHS->getType()) == |
| 9541 | getTypeSizeInBits(RHS->getType()) && |
| 9542 | "LHS and RHS have different sizes?"); |
| 9543 | assert(getTypeSizeInBits(FoundLHS->getType()) == |
| 9544 | getTypeSizeInBits(FoundRHS->getType()) && |
| 9545 | "FoundLHS and FoundRHS have different sizes?"); |
| 9546 | // We want to avoid hurting the compile time with analysis of too big trees. |
| 9547 | if (Depth > MaxSCEVOperationsImplicationDepth) |
| 9548 | return false; |
| 9549 | // We only want to work with ICMP_SGT comparison so far. |
| 9550 | // TODO: Extend to ICMP_UGT? |
| 9551 | if (Pred == ICmpInst::ICMP_SLT) { |
| 9552 | Pred = ICmpInst::ICMP_SGT; |
| 9553 | std::swap(LHS, RHS); |
| 9554 | std::swap(FoundLHS, FoundRHS); |
| 9555 | } |
| 9556 | if (Pred != ICmpInst::ICMP_SGT) |
| 9557 | return false; |
| 9558 | |
| 9559 | auto GetOpFromSExt = [&](const SCEV *S) { |
| 9560 | if (auto *Ext = dyn_cast<SCEVSignExtendExpr>(S)) |
| 9561 | return Ext->getOperand(); |
| 9562 | // TODO: If S is a SCEVConstant then you can cheaply "strip" the sext off |
| 9563 | // the constant in some cases. |
| 9564 | return S; |
| 9565 | }; |
| 9566 | |
| 9567 | // Acquire values from extensions. |
| 9568 | auto *OrigFoundLHS = FoundLHS; |
| 9569 | LHS = GetOpFromSExt(LHS); |
| 9570 | FoundLHS = GetOpFromSExt(FoundLHS); |
| 9571 | |
| 9572 | // Returns true if the SGT predicate can be proved, either trivially or
| | // using the found context.
| 9573 | auto IsSGTViaContext = [&](const SCEV *S1, const SCEV *S2) { |
| 9574 | return isKnownViaSimpleReasoning(ICmpInst::ICMP_SGT, S1, S2) || |
| 9575 | isImpliedViaOperations(ICmpInst::ICMP_SGT, S1, S2, OrigFoundLHS, |
| 9576 | FoundRHS, Depth + 1); |
| 9577 | }; |
| 9578 | |
| 9579 | if (auto *LHSAddExpr = dyn_cast<SCEVAddExpr>(LHS)) { |
| 9580 | // We want to avoid creation of any new non-constant SCEV. Since we are |
| 9581 | // going to compare the operands to RHS, we should be certain that we don't |
| 9582 | // need any size extensions for this. So let's decline all cases when the |
| 9583 | // sizes of types of LHS and RHS do not match. |
| 9584 | // TODO: Maybe try to get RHS from sext to catch more cases? |
| 9585 | if (getTypeSizeInBits(LHS->getType()) != getTypeSizeInBits(RHS->getType())) |
| 9586 | return false; |
| 9587 | |
| 9588 | // Should not overflow. |
| 9589 | if (!LHSAddExpr->hasNoSignedWrap()) |
| 9590 | return false; |
| 9591 | |
| 9592 | auto *LL = LHSAddExpr->getOperand(0); |
| 9593 | auto *LR = LHSAddExpr->getOperand(1); |
| 9594 | auto *MinusOne = getNegativeSCEV(getOne(RHS->getType())); |
| 9595 | |
| 9596 | // Checks that S1 >= 0 && S2 > RHS, trivially or using the found context. |
| 9597 | auto IsSumGreaterThanRHS = [&](const SCEV *S1, const SCEV *S2) { |
| 9598 | return IsSGTViaContext(S1, MinusOne) && IsSGTViaContext(S2, RHS); |
| 9599 | }; |
| 9600 | // Try to prove the following rule: |
| 9601 | // (LHS = LL + LR) && (LL >= 0) && (LR > RHS) => (LHS > RHS). |
| 9602 | // (LHS = LL + LR) && (LR >= 0) && (LL > RHS) => (LHS > RHS). |
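| | // Both follow because adding a non-negative addend cannot decrease the sum
| | // (the add has no signed wrap, checked above), so LHS >= LR > RHS
| | // (respectively LHS >= LL > RHS).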
| 9603 | if (IsSumGreaterThanRHS(LL, LR) || IsSumGreaterThanRHS(LR, LL)) |
| 9604 | return true; |
| 9605 | } else if (auto *LHSUnknownExpr = dyn_cast<SCEVUnknown>(LHS)) { |
| 9606 | Value *LL, *LR; |
| 9607 | // FIXME: Once we have SDiv implemented, we can get rid of this matching. |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 9608 | |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9609 | using namespace llvm::PatternMatch; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 9610 | |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9611 | if (match(LHSUnknownExpr->getValue(), m_SDiv(m_Value(LL), m_Value(LR)))) { |
| 9612 | // Rules for division. |
| 9613 | // We are going to perform some comparisons with Denominator and its |
| 9614 | // derivative expressions. In the general case, creating a SCEV for it may
| 9615 | // lead to a complex analysis of the entire graph, and in particular it
| 9616 | // can request trip count recalculation for the same loop, which would then
| 9617 | // be cached as SCEVCouldNotCompute to avoid infinite recursion. To avoid
| 9618 | // this, we only want to create SCEVs that are constants in this section.
| 9619 | // So we bail if Denominator is not a constant. |
| 9620 | if (!isa<ConstantInt>(LR)) |
| 9621 | return false; |
| 9622 | |
| 9623 | auto *Denominator = cast<SCEVConstant>(getSCEV(LR)); |
| 9624 | |
| 9625 | // We want to make sure that LHS = FoundLHS / Denominator. If it is so, |
| 9626 | // then a SCEV for the numerator already exists and matches FoundLHS.
| 9627 | auto *Numerator = getExistingSCEV(LL); |
| 9628 | if (!Numerator || Numerator->getType() != FoundLHS->getType()) |
| 9629 | return false; |
| 9630 | |
| 9631 | // Make sure that the numerator matches with FoundLHS and the denominator |
| 9632 | // is positive. |
| 9633 | if (!HasSameValue(Numerator, FoundLHS) || !isKnownPositive(Denominator)) |
| 9634 | return false; |
| 9635 | |
| 9636 | auto *DTy = Denominator->getType(); |
| 9637 | auto *FRHSTy = FoundRHS->getType(); |
| 9638 | if (DTy->isPointerTy() != FRHSTy->isPointerTy()) |
| 9639 | // One of the types is a pointer and the other is not. We cannot extend
| 9640 | // them properly to a wider type, so let us just reject this case.
| 9641 | // TODO: Usage of getEffectiveSCEVType for DTy, FRHSTy etc should help |
| 9642 | // to avoid this check. |
| 9643 | return false; |
| 9644 | |
| 9645 | // Given that: |
| 9646 | // FoundLHS > FoundRHS, LHS = FoundLHS / Denominator, Denominator > 0. |
| 9647 | auto *WTy = getWiderType(DTy, FRHSTy); |
| 9648 | auto *DenominatorExt = getNoopOrSignExtend(Denominator, WTy); |
| 9649 | auto *FoundRHSExt = getNoopOrSignExtend(FoundRHS, WTy); |
| 9650 | |
| 9651 | // Try to prove the following rule: |
| 9652 | // (FoundRHS > Denominator - 2) && (RHS <= 0) => (LHS > RHS). |
| 9653 | // For example, given that FoundLHS > 2, FoundLHS is at least 3. If we
| 9654 | // divide it by Denominator < 4, we will have at least 1.
| 9655 | auto *DenomMinusTwo = getMinusSCEV(DenominatorExt, getConstant(WTy, 2)); |
| 9656 | if (isKnownNonPositive(RHS) && |
| 9657 | IsSGTViaContext(FoundRHSExt, DenomMinusTwo)) |
| 9658 | return true; |
| 9659 | |
| 9660 | // Try to prove the following rule: |
| 9661 | // (FoundRHS > -1 - Denominator) && (RHS < 0) => (LHS > RHS). |
| 9662 | // For example, given that FoundLHS > -3, FoundLHS is at least -2.
| 9663 | // If we divide it by Denominator > 2, then: |
| 9664 | // 1. If FoundLHS is negative, then the result is 0. |
| 9665 | // 2. If FoundLHS is non-negative, then the result is non-negative. |
| 9666 | // Either way, the result is non-negative.
| 9667 | auto *MinusOne = getNegativeSCEV(getOne(WTy)); |
| 9668 | auto *NegDenomMinusOne = getMinusSCEV(MinusOne, DenominatorExt); |
| 9669 | if (isKnownNegative(RHS) && |
| 9670 | IsSGTViaContext(FoundRHSExt, NegDenomMinusOne)) |
| 9671 | return true; |
| 9672 | } |
| 9673 | } |
| 9674 | |
| 9675 | return false; |
| 9676 | } |
| 9677 | |
| 9678 | bool |
| 9679 | ScalarEvolution::isKnownViaSimpleReasoning(ICmpInst::Predicate Pred, |
| 9680 | const SCEV *LHS, const SCEV *RHS) { |
| 9681 | return isKnownPredicateViaConstantRanges(Pred, LHS, RHS) || |
| 9682 | IsKnownPredicateViaMinOrMax(*this, Pred, LHS, RHS) || |
| 9683 | IsKnownPredicateViaAddRecStart(*this, Pred, LHS, RHS) || |
| 9684 | isKnownPredicateViaNoOverflow(Pred, LHS, RHS); |
| 9685 | } |
| 9686 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9687 | bool |
Dan Gohman | 430f0cc | 2009-07-21 23:03:19 +0000 | [diff] [blame] | 9688 | ScalarEvolution::isImpliedCondOperandsHelper(ICmpInst::Predicate Pred, |
| 9689 | const SCEV *LHS, const SCEV *RHS, |
| 9690 | const SCEV *FoundLHS, |
| 9691 | const SCEV *FoundRHS) { |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9692 | switch (Pred) { |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 9693 | default: llvm_unreachable("Unexpected ICmpInst::Predicate value!"); |
| 9694 | case ICmpInst::ICMP_EQ: |
| 9695 | case ICmpInst::ICMP_NE: |
| 9696 | if (HasSameValue(LHS, FoundLHS) && HasSameValue(RHS, FoundRHS)) |
| 9697 | return true; |
| 9698 | break; |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9699 | case ICmpInst::ICMP_SLT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 9700 | case ICmpInst::ICMP_SLE: |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9701 | if (isKnownViaSimpleReasoning(ICmpInst::ICMP_SLE, LHS, FoundLHS) && |
| 9702 | isKnownViaSimpleReasoning(ICmpInst::ICMP_SGE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9703 | return true; |
| 9704 | break; |
| 9705 | case ICmpInst::ICMP_SGT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 9706 | case ICmpInst::ICMP_SGE: |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9707 | if (isKnownViaSimpleReasoning(ICmpInst::ICMP_SGE, LHS, FoundLHS) && |
| 9708 | isKnownViaSimpleReasoning(ICmpInst::ICMP_SLE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9709 | return true; |
| 9710 | break; |
| 9711 | case ICmpInst::ICMP_ULT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 9712 | case ICmpInst::ICMP_ULE: |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9713 | if (isKnownViaSimpleReasoning(ICmpInst::ICMP_ULE, LHS, FoundLHS) && |
| 9714 | isKnownViaSimpleReasoning(ICmpInst::ICMP_UGE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9715 | return true; |
| 9716 | break; |
| 9717 | case ICmpInst::ICMP_UGT: |
Dan Gohman | 8c129d7 | 2009-07-16 17:34:36 +0000 | [diff] [blame] | 9718 | case ICmpInst::ICMP_UGE: |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9719 | if (isKnownViaSimpleReasoning(ICmpInst::ICMP_UGE, LHS, FoundLHS) && |
| 9720 | isKnownViaSimpleReasoning(ICmpInst::ICMP_ULE, RHS, FoundRHS)) |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9721 | return true; |
| 9722 | break; |
| 9723 | } |
| 9724 | |
Max Kazantsev | 2e44d29 | 2017-03-31 12:05:30 +0000 | [diff] [blame] | 9725 | // Maybe it can be proved via operations? |
| 9726 | if (isImpliedViaOperations(Pred, LHS, RHS, FoundLHS, FoundRHS)) |
| 9727 | return true; |
| 9728 | |
Dan Gohman | e65c917 | 2009-07-13 21:35:55 +0000 | [diff] [blame] | 9729 | return false; |
Dan Gohman | f19aeec | 2009-06-24 01:18:18 +0000 | [diff] [blame] | 9730 | } |
| 9731 | |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9732 | bool ScalarEvolution::isImpliedCondOperandsViaRanges(ICmpInst::Predicate Pred, |
| 9733 | const SCEV *LHS, |
| 9734 | const SCEV *RHS, |
| 9735 | const SCEV *FoundLHS, |
| 9736 | const SCEV *FoundRHS) { |
| 9737 | if (!isa<SCEVConstant>(RHS) || !isa<SCEVConstant>(FoundRHS)) |
| 9738 | // The restriction on `FoundRHS` can be lifted easily -- it exists only to
| 9739 | // reduce the compile time impact of this optimization. |
| 9740 | return false; |
| 9741 | |
Sanjoy Das | a7d9ec8 | 2016-07-23 00:54:36 +0000 | [diff] [blame] | 9742 | Optional<APInt> Addend = computeConstantDifference(LHS, FoundLHS); |
Sanjoy Das | 095f5b2 | 2016-07-22 20:47:55 +0000 | [diff] [blame] | 9743 | if (!Addend) |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9744 | return false; |
| 9745 | |
Craig Topper | 8f26b79 | 2017-05-06 05:15:09 +0000 | [diff] [blame] | 9746 | const APInt &ConstFoundRHS = cast<SCEVConstant>(FoundRHS)->getAPInt(); |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9747 | |
| 9748 | // `FoundLHSRange` is the range we know `FoundLHS` to be in by virtue of the |
| 9749 | // antecedent "`FoundLHS` `Pred` `FoundRHS`". |
| 9750 | ConstantRange FoundLHSRange = |
| 9751 | ConstantRange::makeAllowedICmpRegion(Pred, ConstFoundRHS); |
| 9752 | |
Sanjoy Das | 095f5b2 | 2016-07-22 20:47:55 +0000 | [diff] [blame] | 9753 | // Since `LHS` is `FoundLHS` + `Addend`, we can compute a range for `LHS`: |
| 9754 | ConstantRange LHSRange = FoundLHSRange.add(ConstantRange(*Addend)); |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9755 | |
| 9756 | // We can also compute the range of values for `LHS` that satisfy the |
| 9757 | // consequent, "`LHS` `Pred` `RHS`": |
Craig Topper | 8f26b79 | 2017-05-06 05:15:09 +0000 | [diff] [blame] | 9758 | const APInt &ConstRHS = cast<SCEVConstant>(RHS)->getAPInt(); |
Sanjoy Das | cb8bca1 | 2015-03-18 00:41:29 +0000 | [diff] [blame] | 9759 | ConstantRange SatisfyingLHSRange = |
| 9760 | ConstantRange::makeSatisfyingICmpRegion(Pred, ConstRHS); |
| 9761 | |
| 9762 | // The antecedent implies the consequent if every value of `LHS` that |
| 9763 | // satisfies the antecedent also satisfies the consequent. |
| 9764 | return SatisfyingLHSRange.contains(LHSRange); |
| 9765 | } |
| 9766 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9767 | bool ScalarEvolution::doesIVOverflowOnLT(const SCEV *RHS, const SCEV *Stride, |
| 9768 | bool IsSigned, bool NoWrap) { |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9769 | assert(isKnownPositive(Stride) && "Positive stride expected!"); |
| 9770 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9771 | if (NoWrap) return false; |
Dan Gohman | 51aaf02 | 2010-01-26 04:40:18 +0000 | [diff] [blame] | 9772 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9773 | unsigned BitWidth = getTypeSizeInBits(RHS->getType()); |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 9774 | const SCEV *One = getOne(Stride->getType()); |
Andrew Trick | 2afa325 | 2011-03-09 17:29:58 +0000 | [diff] [blame] | 9775 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9776 | if (IsSigned) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9777 | APInt MaxRHS = getSignedRangeMax(RHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9778 | APInt MaxValue = APInt::getSignedMaxValue(BitWidth); |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9779 | APInt MaxStrideMinusOne = getSignedRangeMax(getMinusSCEV(Stride, One)); |
Andrew Trick | 2afa325 | 2011-03-09 17:29:58 +0000 | [diff] [blame] | 9780 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9781 | // SMaxRHS + SMaxStrideMinusOne > SMaxValue => overflow! |
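| | // E.g. in i8 with MaxRHS == 120 and Stride == 10: 120 + 9 > 127, i.e. the
| | // last value taken below RHS plus the stride may exceed INT8_MAX and wrap.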
Craig Topper | ef869ec | 2017-05-08 17:39:01 +0000 | [diff] [blame] | 9782 | return (std::move(MaxValue) - MaxStrideMinusOne).slt(MaxRHS); |
Dan Gohman | 36bad00 | 2009-09-17 18:05:20 +0000 | [diff] [blame] | 9783 | } |
Dan Gohman | 0104842 | 2009-06-21 23:46:38 +0000 | [diff] [blame] | 9784 | |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9785 | APInt MaxRHS = getUnsignedRangeMax(RHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9786 | APInt MaxValue = APInt::getMaxValue(BitWidth); |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9787 | APInt MaxStrideMinusOne = getUnsignedRangeMax(getMinusSCEV(Stride, One)); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9788 | |
| 9789 | // UMaxRHS + UMaxStrideMinusOne > UMaxValue => overflow! |
Craig Topper | ef869ec | 2017-05-08 17:39:01 +0000 | [diff] [blame] | 9790 | return (std::move(MaxValue) - MaxStrideMinusOne).ult(MaxRHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9791 | } |
| 9792 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9793 | bool ScalarEvolution::doesIVOverflowOnGT(const SCEV *RHS, const SCEV *Stride, |
| 9794 | bool IsSigned, bool NoWrap) { |
| 9795 | if (NoWrap) return false; |
| 9796 | |
| 9797 | unsigned BitWidth = getTypeSizeInBits(RHS->getType()); |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 9798 | const SCEV *One = getOne(Stride->getType()); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9799 | |
| 9800 | if (IsSigned) { |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9801 | APInt MinRHS = getSignedRangeMin(RHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9802 | APInt MinValue = APInt::getSignedMinValue(BitWidth); |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9803 | APInt MaxStrideMinusOne = getSignedRangeMax(getMinusSCEV(Stride, One)); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9804 | |
| 9805 | // SMinRHS - SMaxStrideMinusOne < SMinValue => overflow! |
Craig Topper | ef869ec | 2017-05-08 17:39:01 +0000 | [diff] [blame] | 9806 | return (std::move(MinValue) + MaxStrideMinusOne).sgt(MinRHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9807 | } |
| 9808 | |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9809 | APInt MinRHS = getUnsignedRangeMin(RHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9810 | APInt MinValue = APInt::getMinValue(BitWidth); |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 9811 | APInt MaxStrideMinusOne = getUnsignedRangeMax(getMinusSCEV(Stride, One)); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9812 | |
| 9813 | // UMinRHS - UMaxStrideMinusOne < UMinValue => overflow! |
Craig Topper | ef869ec | 2017-05-08 17:39:01 +0000 | [diff] [blame] | 9814 | return (std::move(MinValue) + MaxStrideMinusOne).ugt(MinRHS); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9815 | } |
| 9816 | |
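| | // Compute how often the backedge is taken when an IV must cover a distance
| | // of Delta in increments of Step: with Equality == false this is
| | // ceil(Delta / Step) (e.g. Delta = 10, Step = 3 gives 4), while with
| | // Equality == true a full extra Step is added before the division.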
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 9817 | const SCEV *ScalarEvolution::computeBECount(const SCEV *Delta, const SCEV *Step, |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9818 | bool Equality) { |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 9819 | const SCEV *One = getOne(Step->getType()); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9820 | Delta = Equality ? getAddExpr(Delta, Step) |
| 9821 | : getAddExpr(Delta, getMinusSCEV(Step, One)); |
| 9822 | return getUDivExpr(Delta, Step); |
Dan Gohman | 0104842 | 2009-06-21 23:46:38 +0000 | [diff] [blame] | 9823 | } |
| 9824 | |
Anna Thomas | 79503c0 | 2017-10-16 17:47:17 +0000 | [diff] [blame] | 9825 | const SCEV *ScalarEvolution::computeMaxBECountForLT(const SCEV *Start, |
| 9826 | const SCEV *Stride, |
| 9827 | const SCEV *End, |
| 9828 | unsigned BitWidth, |
| 9829 | bool IsSigned) { |
Anna Thomas | a2ca902 | 2017-10-13 14:30:43 +0000 | [diff] [blame] | 9830 | |
| 9831 | assert(!isKnownNonPositive(Stride) && |
| 9832 | "Stride is expected strictly positive!"); |
| 9833 | // Calculate the maximum backedge count based on the range of values |
| 9834 | // permitted by Start, End, and Stride. |
| 9835 | const SCEV *MaxBECount; |
| 9836 | APInt MinStart = |
| 9837 | IsSigned ? getSignedRangeMin(Start) : getUnsignedRangeMin(Start); |
| 9838 | |
Sanjoy Das | 8499ebf | 2017-10-25 21:41:00 +0000 | [diff] [blame] | 9839 | APInt StrideForMaxBECount = |
| 9840 | IsSigned ? getSignedRangeMin(Stride) : getUnsignedRangeMin(Stride); |
Anna Thomas | a2ca902 | 2017-10-13 14:30:43 +0000 | [diff] [blame] | 9841 | |
Sanjoy Das | 8499ebf | 2017-10-25 21:41:00 +0000 | [diff] [blame] | 9842 | // We already know that the stride is positive, so we paper over conservatism |
| 9843 | // in our range computation by forcing StrideForMaxBECount to be at least one. |
| 9844 | // In theory this is unnecessary, but we expect MaxBECount to be a |
| 9845 | // SCEVConstant, and (udiv <constant> 0) is not constant folded by SCEV (there |
| 9846 | // is nothing to constant fold it to). |
| 9847 | APInt One(BitWidth, 1, IsSigned); |
| 9848 | StrideForMaxBECount = APIntOps::smax(One, StrideForMaxBECount); |
Anna Thomas | a2ca902 | 2017-10-13 14:30:43 +0000 | [diff] [blame] | 9849 | |
| 9850 | APInt MaxValue = IsSigned ? APInt::getSignedMaxValue(BitWidth) |
| 9851 | : APInt::getMaxValue(BitWidth); |
| 9852 | APInt Limit = MaxValue - (StrideForMaxBECount - 1); |
| 9853 | |
| 9854 | // Although End can be a MAX expression, we estimate MaxEnd considering only
| 9855 | // the case End = RHS of the loop termination condition. This is safe because |
| 9856 | // in the other case (End - Start) is zero, leading to a zero maximum backedge |
| 9857 | // taken count. |
| 9858 | APInt MaxEnd = IsSigned ? APIntOps::smin(getSignedRangeMax(End), Limit) |
| 9859 | : APIntOps::umin(getUnsignedRangeMax(End), Limit); |
| 9860 | |
| 9861 | MaxBECount = computeBECount(getConstant(MaxEnd - MinStart) /* Delta */, |
| 9862 | getConstant(StrideForMaxBECount) /* Step */, |
| 9863 | false /* Equality */); |
| 9864 | |
| 9865 | return MaxBECount; |
| 9866 | } |
| 9867 | |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 9868 | ScalarEvolution::ExitLimit |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 9869 | ScalarEvolution::howManyLessThans(const SCEV *LHS, const SCEV *RHS, |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9870 | const Loop *L, bool IsSigned, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 9871 | bool ControlsExit, bool AllowPredicates) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 9872 | SmallPtrSet<const SCEVPredicate *, 4> Predicates; |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 9873 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9874 | const SCEVAddRecExpr *IV = dyn_cast<SCEVAddRecExpr>(LHS); |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9875 | bool PredicatedIV = false; |
| 9876 | |
| 9877 | if (!IV && AllowPredicates) { |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 9878 | // Try to make this an AddRec using runtime tests, in the first X |
| 9879 | // iterations of this loop, where X is the SCEV expression found by the |
| 9880 | // algorithm below. |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 9881 | IV = convertSCEVToAddRecWithPredicates(LHS, L, Predicates); |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9882 | PredicatedIV = true; |
| 9883 | } |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 9884 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9885 | // Avoid weird loops |
| 9886 | if (!IV || IV->getLoop() != L || !IV->isAffine()) |
| 9887 | return getCouldNotCompute(); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 9888 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 9889 | bool NoWrap = ControlsExit && |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9890 | IV->getNoWrapFlags(IsSigned ? SCEV::FlagNSW : SCEV::FlagNUW); |
Wojciech Matyjewicz | 35545fd | 2008-02-13 11:51:34 +0000 | [diff] [blame] | 9891 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9892 | const SCEV *Stride = IV->getStepRecurrence(*this); |
Wojciech Matyjewicz | 35545fd | 2008-02-13 11:51:34 +0000 | [diff] [blame] | 9893 | |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9894 | bool PositiveStride = isKnownPositive(Stride); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 9895 | |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9896 | // Avoid negative or zero stride values. |
| 9897 | if (!PositiveStride) { |
| 9898 | // We can compute the correct backedge taken count for loops with unknown |
| 9899 | // strides if we can prove that the loop is not an infinite loop with side |
| 9900 | // effects. Here's the loop structure we are trying to handle - |
| 9901 | // |
| 9902 | // i = start |
| 9903 | // do { |
| 9904 | // A[i] = i; |
| 9905 | // i += s; |
| 9906 | // } while (i < end); |
| 9907 | // |
| 9908 | // The backedge taken count for such loops is evaluated as - |
| 9909 | // (max(end, start + stride) - start - 1) /u stride |
| 9910 | // |
| 9911 | // The additional preconditions that we need to check to prove correctness |
| 9912 | // of the above formula is as follows - |
| 9913 | // |
| 9914 | // a) IV is either nuw or nsw depending upon signedness (indicated by the |
| 9915 | // NoWrap flag). |
| 9916 | // b) loop is single exit with no side effects. |
| 9917 | // |
| 9918 | // |
| 9919 | // Precondition a) implies that if the stride is negative, this is a single |
| 9920 | // trip loop. The backedge taken count formula reduces to zero in this case. |
| 9921 | // |
| 9922 | // Precondition b) implies that the unknown stride cannot be zero otherwise |
| 9923 | // we have UB. |
| 9924 | // |
| 9925 | // The positive stride case is the same as isKnownPositive(Stride) returning |
| 9926 | // true (original behavior of the function). |
| 9927 | // |
| 9928 | // We want to make sure that the stride is truly unknown as there are edge |
| 9929 | // cases where ScalarEvolution propagates no wrap flags to the |
| 9930 | // post-increment/decrement IV even though the increment/decrement operation |
| 9931 | // itself is wrapping. The computed backedge taken count may be wrong in |
| 9932 | // such cases. This is prevented by checking that the stride is not known to |
| 9933 | // be either positive or non-positive. For example, no wrap flags are |
| 9934 | // propagated to the post-increment IV of this loop with a trip count of 2 - |
| 9935 | // |
| 9936 | // unsigned char i; |
| 9937 | // for(i=127; i<128; i+=129) |
| 9938 | // A[i] = i; |
| 9939 | // |
| 9940 | if (PredicatedIV || !NoWrap || isKnownNonPositive(Stride) || |
| 9941 | !loopHasNoSideEffects(L)) |
| 9942 | return getCouldNotCompute(); |
David L Kreitzer | 8bbabee | 2016-09-16 14:38:13 +0000 | [diff] [blame] | 9943 | } else if (!Stride->isOne() && |
| 9944 | doesIVOverflowOnLT(RHS, Stride, IsSigned, NoWrap)) |
| 9945 | // Avoid proven overflow cases: this will ensure that the backedge taken |
| 9946 | // count will not generate any unsigned overflow. Relaxed no-overflow |
| 9947 | // conditions exploit NoWrapFlags, allowing to optimize in presence of |
| 9948 | // undefined behaviors like the case of C language. |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9949 | return getCouldNotCompute(); |
Dan Gohman | 2b8da35 | 2009-04-30 20:47:05 +0000 | [diff] [blame] | 9950 | |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9951 | ICmpInst::Predicate Cond = IsSigned ? ICmpInst::ICMP_SLT |
| 9952 | : ICmpInst::ICMP_ULT; |
| 9953 | const SCEV *Start = IV->getStart(); |
| 9954 | const SCEV *End = RHS; |
Anna Thomas | a2ca902 | 2017-10-13 14:30:43 +0000 | [diff] [blame] | 9955 | // When the RHS is not invariant, we do not know the end bound of the loop and |
| 9956 | // cannot calculate the ExactBECount needed by ExitLimit. However, we can |
| 9957 | // calculate the MaxBECount, given the start, stride and max value for the end |
| 9958 | // bound of the loop (RHS), and the fact that IV does not overflow (which is |
| 9959 | // checked above). |
| 9960 | if (!isLoopInvariant(RHS, L)) { |
Anna Thomas | 79503c0 | 2017-10-16 17:47:17 +0000 | [diff] [blame] | 9961 | const SCEV *MaxBECount = computeMaxBECountForLT( |
Anna Thomas | a2ca902 | 2017-10-13 14:30:43 +0000 | [diff] [blame] | 9962 | Start, Stride, RHS, getTypeSizeInBits(LHS->getType()), IsSigned); |
| 9963 | return ExitLimit(getCouldNotCompute() /* ExactNotTaken */, MaxBECount, |
| 9964 | false /*MaxOrZero*/, Predicates); |
| 9965 | } |
John Brawn | ecf7930 | 2016-10-18 10:10:53 +0000 | [diff] [blame] | 9966 | // If the backedge is taken at least once, then it will be taken |
| 9967 | // (End-Start)/Stride times (rounded up to a multiple of Stride), where Start |
| 9968 | // is the LHS value of the less-than comparison the first time it is evaluated |
| 9969 | // and End is the RHS. |
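  // As an illustrative example (hypothetical values): Start = 0, End = 10 and
  // Stride = 3 gives ceil((10 - 0) / 3) = 4; the comparison holds for the IV
  // values 0, 3, 6 and 9, so the backedge is taken 4 times.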
| 9970 | const SCEV *BECountIfBackedgeTaken = |
| 9971 | computeBECount(getMinusSCEV(End, Start), Stride, false); |
 | 9972 |   // If the loop entry is guarded by the result of the backedge test of the
 | 9973 |   // first loop iteration, then we know the backedge will be taken at least
 | 9974 |   // once and so the backedge taken count is as above. If not, then we use the
 | 9975 |   // expression (max(End,Start)-Start)/Stride to describe the backedge count:
 | 9976 |   // if the backedge is taken at least once, max(End,Start) is End and so the
 | 9977 |   // result is as above; if not, max(End,Start) is Start and we get a backedge
 | 9978 |   // count of zero.
| 9979 | const SCEV *BECount; |
| 9980 | if (isLoopEntryGuardedByCond(L, Cond, getMinusSCEV(Start, Stride), RHS)) |
| 9981 | BECount = BECountIfBackedgeTaken; |
| 9982 | else { |
Sanjoy Das | e8fd956 | 2016-06-18 04:38:31 +0000 | [diff] [blame] | 9983 | End = IsSigned ? getSMaxExpr(RHS, Start) : getUMaxExpr(RHS, Start); |
John Brawn | ecf7930 | 2016-10-18 10:10:53 +0000 | [diff] [blame] | 9984 | BECount = computeBECount(getMinusSCEV(End, Start), Stride, false); |
| 9985 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9986 | |
Arnaud A. de Grandmaison | 75c9e6d | 2014-03-15 22:13:15 +0000 | [diff] [blame] | 9987 | const SCEV *MaxBECount; |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 9988 | bool MaxOrZero = false; |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 9989 | if (isa<SCEVConstant>(BECount)) |
| 9990 | MaxBECount = BECount; |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 9991 | else if (isa<SCEVConstant>(BECountIfBackedgeTaken)) { |
John Brawn | ecf7930 | 2016-10-18 10:10:53 +0000 | [diff] [blame] | 9992 | // If we know exactly how many times the backedge will be taken if it's |
| 9993 | // taken at least once, then the backedge count will either be that or |
| 9994 | // zero. |
| 9995 | MaxBECount = BECountIfBackedgeTaken; |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 9996 | MaxOrZero = true; |
| 9997 | } else { |
Anna Thomas | 79503c0 | 2017-10-16 17:47:17 +0000 | [diff] [blame] | 9998 | MaxBECount = computeMaxBECountForLT( |
| 9999 | Start, Stride, RHS, getTypeSizeInBits(LHS->getType()), IsSigned); |
John Brawn | ecf7930 | 2016-10-18 10:10:53 +0000 | [diff] [blame] | 10000 | } |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10001 | |
Sanjoy Das | 036dda2 | 2017-05-22 06:46:04 +0000 | [diff] [blame] | 10002 | if (isa<SCEVCouldNotCompute>(MaxBECount) && |
| 10003 | !isa<SCEVCouldNotCompute>(BECount)) |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 10004 | MaxBECount = getConstant(getUnsignedRangeMax(BECount)); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10005 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 10006 | return ExitLimit(BECount, MaxBECount, MaxOrZero, Predicates); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10007 | } |
| 10008 | |
| 10009 | ScalarEvolution::ExitLimit |
Sanjoy Das | 108fcf2 | 2016-05-29 00:38:00 +0000 | [diff] [blame] | 10010 | ScalarEvolution::howManyGreaterThans(const SCEV *LHS, const SCEV *RHS, |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10011 | const Loop *L, bool IsSigned, |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 10012 | bool ControlsExit, bool AllowPredicates) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 10013 | SmallPtrSet<const SCEVPredicate *, 4> Predicates; |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10014 | // We handle only IV > Invariant |
| 10015 | if (!isLoopInvariant(RHS, L)) |
| 10016 | return getCouldNotCompute(); |
| 10017 | |
| 10018 | const SCEVAddRecExpr *IV = dyn_cast<SCEVAddRecExpr>(LHS); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 10019 | if (!IV && AllowPredicates) |
| 10020 | // Try to make this an AddRec using runtime tests, in the first X |
| 10021 | // iterations of this loop, where X is the SCEV expression found by the |
| 10022 | // algorithm below. |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 10023 | IV = convertSCEVToAddRecWithPredicates(LHS, L, Predicates); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10024 | |
 | 10025 |   // Bail out if LHS is not an affine AddRec in this loop.
| 10026 | if (!IV || IV->getLoop() != L || !IV->isAffine()) |
| 10027 | return getCouldNotCompute(); |
| 10028 | |
Mark Heffernan | 2beab5f | 2014-10-10 17:39:11 +0000 | [diff] [blame] | 10029 | bool NoWrap = ControlsExit && |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10030 | IV->getNoWrapFlags(IsSigned ? SCEV::FlagNSW : SCEV::FlagNUW); |
| 10031 | |
| 10032 | const SCEV *Stride = getNegativeSCEV(IV->getStepRecurrence(*this)); |
| 10033 | |
| 10034 | // Avoid negative or zero stride values |
| 10035 | if (!isKnownPositive(Stride)) |
| 10036 | return getCouldNotCompute(); |
| 10037 | |
 | 10038 |   // Avoid proven overflow cases: this will ensure that the backedge taken count
 | 10039 |   // will not generate any unsigned overflow. Relaxed no-overflow conditions
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 10040 |   // exploit NoWrapFlags, allowing optimization in the presence of undefined
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10041 |   // behavior, as in the C language.
| 10042 | if (!Stride->isOne() && doesIVOverflowOnGT(RHS, Stride, IsSigned, NoWrap)) |
| 10043 | return getCouldNotCompute(); |
| 10044 | |
| 10045 | ICmpInst::Predicate Cond = IsSigned ? ICmpInst::ICMP_SGT |
| 10046 | : ICmpInst::ICMP_UGT; |
| 10047 | |
| 10048 | const SCEV *Start = IV->getStart(); |
| 10049 | const SCEV *End = RHS; |
Sanjoy Das | e8fd956 | 2016-06-18 04:38:31 +0000 | [diff] [blame] | 10050 | if (!isLoopEntryGuardedByCond(L, Cond, getAddExpr(Start, Stride), RHS)) |
| 10051 | End = IsSigned ? getSMinExpr(RHS, Start) : getUMinExpr(RHS, Start); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10052 | |
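  // As an illustrative example (hypothetical values): Start = 10, End = RHS = 0
  // and Stride = 3 (an IV stepping by -3) gives ceil((10 - 0) / 3) = 4; the
  // comparison IV > 0 holds for the IV values 10, 7, 4 and 1, so the backedge
  // is taken 4 times.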
| 10053 | const SCEV *BECount = computeBECount(getMinusSCEV(Start, End), Stride, false); |
| 10054 | |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 10055 | APInt MaxStart = IsSigned ? getSignedRangeMax(Start) |
| 10056 | : getUnsignedRangeMax(Start); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10057 | |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 10058 | APInt MinStride = IsSigned ? getSignedRangeMin(Stride) |
| 10059 | : getUnsignedRangeMin(Stride); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10060 | |
| 10061 | unsigned BitWidth = getTypeSizeInBits(LHS->getType()); |
| 10062 | APInt Limit = IsSigned ? APInt::getSignedMinValue(BitWidth) + (MinStride - 1) |
| 10063 | : APInt::getMinValue(BitWidth) + (MinStride - 1); |
| 10064 | |
 | 10065 |   // Although End can be a MIN expression, we estimate MinEnd considering only
| 10066 | // the case End = RHS. This is safe because in the other case (Start - End) |
| 10067 | // is zero, leading to a zero maximum backedge taken count. |
| 10068 | APInt MinEnd = |
Craig Topper | 0102039 | 2017-06-24 23:34:50 +0000 | [diff] [blame] | 10069 | IsSigned ? APIntOps::smax(getSignedRangeMin(RHS), Limit) |
| 10070 | : APIntOps::umax(getUnsignedRangeMin(RHS), Limit); |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10071 | |
| 10072 | |
| 10073 | const SCEV *MaxBECount = getCouldNotCompute(); |
| 10074 | if (isa<SCEVConstant>(BECount)) |
| 10075 | MaxBECount = BECount; |
| 10076 | else |
Johannes Doerfert | 2683e56 | 2015-02-09 12:34:23 +0000 | [diff] [blame] | 10077 | MaxBECount = computeBECount(getConstant(MaxStart - MinEnd), |
Andrew Trick | 34e2f0c | 2013-11-06 02:08:26 +0000 | [diff] [blame] | 10078 | getConstant(MinStride), false); |
| 10079 | |
| 10080 | if (isa<SCEVCouldNotCompute>(MaxBECount)) |
| 10081 | MaxBECount = BECount; |
| 10082 | |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 10083 | return ExitLimit(BECount, MaxBECount, false, Predicates); |
Chris Lattner | 587a75b | 2005-08-15 23:33:51 +0000 | [diff] [blame] | 10084 | } |
| 10085 | |
Benjamin Kramer | c321e53 | 2016-06-08 19:09:22 +0000 | [diff] [blame] | 10086 | const SCEV *SCEVAddRecExpr::getNumIterationsInRange(const ConstantRange &Range, |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 10087 | ScalarEvolution &SE) const { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10088 | if (Range.isFullSet()) // Infinite loop. |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 10089 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10090 | |
| 10091 | // If the start is a non-zero constant, shift the range to simplify things. |
Dan Gohman | a30370b | 2009-05-04 22:02:23 +0000 | [diff] [blame] | 10092 | if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(getStart())) |
Reid Spencer | 2e54a15 | 2007-03-02 00:28:52 +0000 | [diff] [blame] | 10093 | if (!SC->getValue()->isZero()) { |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 10094 | SmallVector<const SCEV *, 4> Operands(op_begin(), op_end()); |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 10095 | Operands[0] = SE.getZero(SC->getType()); |
Andrew Trick | 8b55b73 | 2011-03-14 16:50:06 +0000 | [diff] [blame] | 10096 | const SCEV *Shifted = SE.getAddRecExpr(Operands, getLoop(), |
Andrew Trick | f6b01ff | 2011-03-15 00:37:00 +0000 | [diff] [blame] | 10097 | getNoWrapFlags(FlagNW)); |
Sanjoy Das | 6391459 | 2015-10-18 00:29:20 +0000 | [diff] [blame] | 10098 | if (const auto *ShiftedAddRec = dyn_cast<SCEVAddRecExpr>(Shifted)) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10099 | return ShiftedAddRec->getNumIterationsInRange( |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 10100 | Range.subtract(SC->getAPInt()), SE); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10101 | // This is strange and shouldn't happen. |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 10102 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10103 | } |
| 10104 | |
| 10105 | // The only time we can solve this is when we have all constant indices. |
| 10106 | // Otherwise, we cannot determine the overflow conditions. |
Sanjoy Das | ff3b8b4 | 2015-12-01 07:49:23 +0000 | [diff] [blame] | 10107 | if (any_of(operands(), [](const SCEV *Op) { return !isa<SCEVConstant>(Op); })) |
Sanjoy Das | f07d2a7 | 2015-10-18 00:29:23 +0000 | [diff] [blame] | 10108 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10109 | |
| 10110 | // Okay at this point we know that all elements of the chrec are constants and |
| 10111 | // that the start element is zero. |
| 10112 | |
| 10113 | // First check to see if the range contains zero. If not, the first |
| 10114 | // iteration exits. |
Dan Gohman | b397e1a | 2009-04-21 01:07:12 +0000 | [diff] [blame] | 10115 | unsigned BitWidth = SE.getTypeSizeInBits(getType()); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 10116 | if (!Range.contains(APInt(BitWidth, 0))) |
Sanjoy Das | 2aacc0e | 2015-09-23 01:59:04 +0000 | [diff] [blame] | 10117 | return SE.getZero(getType()); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10118 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10119 | if (isAffine()) { |
| 10120 | // If this is an affine expression then we have this situation: |
| 10121 | // Solve {0,+,A} in Range === Ax in Range |
| 10122 | |
Nick Lewycky | 5246026 | 2007-07-16 02:08:00 +0000 | [diff] [blame] | 10123 | // We know that zero is in the range. If A is positive then we know that |
| 10124 | // the upper value of the range must be the first possible exit value. |
| 10125 | // If A is negative then the lower of the range is the last possible loop |
| 10126 | // value. Also note that we already checked for a full range. |
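    // As an illustrative example (hypothetical values): for {0,+,2} and
    // Range = [0, 10), End is 9 and the exit value is (9 + 2) /u 2 = 5;
    // iteration 4 yields 8, which is still in the range, while iteration 5
    // yields 10, the first value outside it.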
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 10127 | APInt A = cast<SCEVConstant>(getOperand(1))->getAPInt(); |
Craig Topper | c97fdb8 | 2017-05-06 05:15:11 +0000 | [diff] [blame] | 10128 | APInt End = A.sge(1) ? (Range.getUpper() - 1) : Range.getLower(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10129 | |
Nick Lewycky | 5246026 | 2007-07-16 02:08:00 +0000 | [diff] [blame] | 10130 | // The exit value should be (End+A)/A. |
Nick Lewycky | 3934961 | 2007-09-27 14:12:54 +0000 | [diff] [blame] | 10131 | APInt ExitVal = (End + A).udiv(A); |
Owen Anderson | edb4a70 | 2009-07-24 23:12:02 +0000 | [diff] [blame] | 10132 | ConstantInt *ExitValue = ConstantInt::get(SE.getContext(), ExitVal); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10133 | |
| 10134 | // Evaluate at the exit value. If we really did fall out of the valid |
| 10135 | // range, then we computed our trip count, otherwise wrap around or other |
| 10136 | // things must have happened. |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10137 | ConstantInt *Val = EvaluateConstantChrecAtConstant(this, ExitValue, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 10138 | if (Range.contains(Val->getValue())) |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 10139 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10140 | |
| 10141 | // Ensure that the previous value is in the range. This is a sanity check. |
Reid Spencer | 3a7e9d8 | 2007-02-28 19:57:34 +0000 | [diff] [blame] | 10142 | assert(Range.contains( |
Dan Gohman | ce973df | 2009-06-24 04:48:43 +0000 | [diff] [blame] | 10143 | EvaluateConstantChrecAtConstant(this, |
Craig Topper | c97fdb8 | 2017-05-06 05:15:11 +0000 | [diff] [blame] | 10144 | ConstantInt::get(SE.getContext(), ExitVal - 1), SE)->getValue()) && |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10145 | "Linear scev computation is off in a bad way!"); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10146 | return SE.getConstant(ExitValue); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10147 | } else if (isQuadratic()) { |
| 10148 | // If this is a quadratic (3-term) AddRec {L,+,M,+,N}, find the roots of the |
| 10149 | // quadratic equation to solve it. To do this, we must frame our problem in |
| 10150 | // terms of figuring out when zero is crossed, instead of when |
| 10151 | // Range.getUpper() is crossed. |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 10152 | SmallVector<const SCEV *, 4> NewOps(op_begin(), op_end()); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10153 | NewOps[0] = SE.getNegativeSCEV(SE.getConstant(Range.getUpper())); |
Sanjoy Das | 54e6a21 | 2016-10-02 00:09:45 +0000 | [diff] [blame] | 10154 | const SCEV *NewAddRec = SE.getAddRecExpr(NewOps, getLoop(), FlagAnyWrap); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10155 | |
| 10156 | // Next, solve the constructed addrec |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 10157 | if (auto Roots = |
| 10158 | SolveQuadraticEquation(cast<SCEVAddRecExpr>(NewAddRec), SE)) { |
Sanjoy Das | 5a3d893 | 2016-06-15 04:37:47 +0000 | [diff] [blame] | 10159 | const SCEVConstant *R1 = Roots->first; |
| 10160 | const SCEVConstant *R2 = Roots->second; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10161 | // Pick the smallest positive root value. |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 10162 | if (ConstantInt *CB = dyn_cast<ConstantInt>(ConstantExpr::getICmp( |
| 10163 | ICmpInst::ICMP_ULT, R1->getValue(), R2->getValue()))) { |
David Blaikie | dc3f01e | 2015-03-09 01:57:13 +0000 | [diff] [blame] | 10164 | if (!CB->getZExtValue()) |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 10165 | std::swap(R1, R2); // R1 is the minimum root now. |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10166 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10167 | // Make sure the root is not off by one. The returned iteration should |
| 10168 | // not be in the range, but the previous one should be. When solving |
| 10169 | // for "X*X < 5", for example, we should not return a root of 2. |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 10170 | ConstantInt *R1Val = |
| 10171 | EvaluateConstantChrecAtConstant(this, R1->getValue(), SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 10172 | if (Range.contains(R1Val->getValue())) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10173 | // The next iteration must be out of the range... |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 10174 | ConstantInt *NextVal = |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 10175 | ConstantInt::get(SE.getContext(), R1->getAPInt() + 1); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10176 | |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10177 | R1Val = EvaluateConstantChrecAtConstant(this, NextVal, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 10178 | if (!Range.contains(R1Val->getValue())) |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10179 | return SE.getConstant(NextVal); |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 10180 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10181 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10182 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10183 | // If R1 was not in the range, then it is a good return value. Make |
| 10184 | // sure that R1-1 WAS in the range though, just in case. |
Owen Anderson | f1f1743 | 2009-07-06 22:37:39 +0000 | [diff] [blame] | 10185 | ConstantInt *NextVal = |
Sanjoy Das | 0de2fec | 2015-12-17 20:28:46 +0000 | [diff] [blame] | 10186 | ConstantInt::get(SE.getContext(), R1->getAPInt() - 1); |
Dan Gohman | a37eaf2 | 2007-10-22 18:31:58 +0000 | [diff] [blame] | 10187 | R1Val = EvaluateConstantChrecAtConstant(this, NextVal, SE); |
Reid Spencer | 6a44033 | 2007-03-01 07:54:15 +0000 | [diff] [blame] | 10188 | if (Range.contains(R1Val->getValue())) |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10189 | return R1; |
Sanjoy Das | 0e392d5 | 2016-06-15 04:37:50 +0000 | [diff] [blame] | 10190 | return SE.getCouldNotCompute(); // Something strange happened |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10191 | } |
| 10192 | } |
| 10193 | } |
| 10194 | |
Dan Gohman | 31efa30 | 2009-04-18 17:58:19 +0000 | [diff] [blame] | 10195 | return SE.getCouldNotCompute(); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10196 | } |
| 10197 | |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 10198 | // Return true when S contains at least one undef value.
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 10199 | static inline bool containsUndefs(const SCEV *S) { |
| 10200 | return SCEVExprContains(S, [](const SCEV *S) { |
| 10201 | if (const auto *SU = dyn_cast<SCEVUnknown>(S)) |
| 10202 | return isa<UndefValue>(SU->getValue()); |
| 10203 | else if (const auto *SC = dyn_cast<SCEVConstant>(S)) |
| 10204 | return isa<UndefValue>(SC->getValue()); |
| 10205 | return false; |
| 10206 | }); |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 10207 | } |
| 10208 | |
| 10209 | namespace { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10210 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10211 | // Collect the step recurrences of all AddRec sub-expressions.
| 10212 | struct SCEVCollectStrides { |
| 10213 | ScalarEvolution &SE; |
| 10214 | SmallVectorImpl<const SCEV *> &Strides; |
| 10215 | |
| 10216 | SCEVCollectStrides(ScalarEvolution &SE, SmallVectorImpl<const SCEV *> &S) |
| 10217 | : SE(SE), Strides(S) {} |
| 10218 | |
| 10219 | bool follow(const SCEV *S) { |
| 10220 | if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) |
| 10221 | Strides.push_back(AR->getStepRecurrence(SE)); |
| 10222 | return true; |
| 10223 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10224 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10225 | bool isDone() const { return false; } |
| 10226 | }; |
| 10227 | |
 | 10228 | // Collect all SCEVUnknown, SCEVMulExpr and SCEVSignExtendExpr expressions.
| 10229 | struct SCEVCollectTerms { |
| 10230 | SmallVectorImpl<const SCEV *> &Terms; |
| 10231 | |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10232 | SCEVCollectTerms(SmallVectorImpl<const SCEV *> &T) : Terms(T) {} |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10233 | |
| 10234 | bool follow(const SCEV *S) { |
Tobias Grosser | 2bbec0e | 2016-10-17 11:56:26 +0000 | [diff] [blame] | 10235 | if (isa<SCEVUnknown>(S) || isa<SCEVMulExpr>(S) || |
| 10236 | isa<SCEVSignExtendExpr>(S)) { |
Sebastian Pop | a7d3d6a | 2014-05-07 19:00:32 +0000 | [diff] [blame] | 10237 | if (!containsUndefs(S)) |
| 10238 | Terms.push_back(S); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10239 | |
| 10240 | // Stop recursion: once we collected a term, do not walk its operands. |
| 10241 | return false; |
| 10242 | } |
| 10243 | |
| 10244 | // Keep looking. |
| 10245 | return true; |
| 10246 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10247 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10248 | bool isDone() const { return false; } |
| 10249 | }; |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10250 | |
| 10251 | // Check if a SCEV contains an AddRecExpr. |
| 10252 | struct SCEVHasAddRec { |
| 10253 | bool &ContainsAddRec; |
| 10254 | |
| 10255 | SCEVHasAddRec(bool &ContainsAddRec) : ContainsAddRec(ContainsAddRec) { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10256 | ContainsAddRec = false; |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10257 | } |
| 10258 | |
| 10259 | bool follow(const SCEV *S) { |
| 10260 | if (isa<SCEVAddRecExpr>(S)) { |
| 10261 | ContainsAddRec = true; |
| 10262 | |
 | 10263 |       // Stop recursion: we found an AddRec, so there is no need to walk its operands.
| 10264 | return false; |
| 10265 | } |
| 10266 | |
| 10267 | // Keep looking. |
| 10268 | return true; |
| 10269 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10270 | |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10271 | bool isDone() const { return false; } |
| 10272 | }; |
| 10273 | |
| 10274 | // Find factors that are multiplied with an expression that (possibly as a |
| 10275 | // subexpression) contains an AddRecExpr. In the expression: |
| 10276 | // |
| 10277 | // 8 * (100 + %p * %q * (%a + {0, +, 1}_loop)) |
| 10278 | // |
| 10279 | // "%p * %q" are factors multiplied by the expression "(%a + {0, +, 1}_loop)" |
| 10280 | // that contains the AddRec {0, +, 1}_loop. %p * %q are likely to be array size |
| 10281 | // parameters as they form a product with an induction variable. |
| 10282 | // |
| 10283 | // This collector expects all array size parameters to be in the same MulExpr. |
| 10284 | // It might be necessary to later add support for collecting parameters that are |
| 10285 | // spread over different nested MulExpr. |
| 10286 | struct SCEVCollectAddRecMultiplies { |
| 10287 | SmallVectorImpl<const SCEV *> &Terms; |
| 10288 | ScalarEvolution &SE; |
| 10289 | |
| 10290 | SCEVCollectAddRecMultiplies(SmallVectorImpl<const SCEV *> &T, ScalarEvolution &SE) |
| 10291 | : Terms(T), SE(SE) {} |
| 10292 | |
| 10293 | bool follow(const SCEV *S) { |
| 10294 | if (auto *Mul = dyn_cast<SCEVMulExpr>(S)) { |
| 10295 | bool HasAddRec = false; |
| 10296 | SmallVector<const SCEV *, 0> Operands; |
| 10297 | for (auto Op : Mul->operands()) { |
Tobias Grosser | e3684d0 | 2017-05-27 15:17:49 +0000 | [diff] [blame] | 10298 | const SCEVUnknown *Unknown = dyn_cast<SCEVUnknown>(Op); |
| 10299 | if (Unknown && !isa<CallInst>(Unknown->getValue())) { |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10300 | Operands.push_back(Op); |
Tobias Grosser | e3684d0 | 2017-05-27 15:17:49 +0000 | [diff] [blame] | 10301 | } else if (Unknown) { |
| 10302 | HasAddRec = true; |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10303 | } else { |
| 10304 | bool ContainsAddRec; |
 | 10305 |           SCEVHasAddRec ContainsAddRecVisitor(ContainsAddRec);
 | 10306 |           visitAll(Op, ContainsAddRecVisitor);
| 10307 | HasAddRec |= ContainsAddRec; |
| 10308 | } |
| 10309 | } |
| 10310 | if (Operands.size() == 0) |
| 10311 | return true; |
| 10312 | |
| 10313 | if (!HasAddRec) |
| 10314 | return false; |
| 10315 | |
| 10316 | Terms.push_back(SE.getMulExpr(Operands)); |
| 10317 | // Stop recursion: once we collected a term, do not walk its operands. |
| 10318 | return false; |
| 10319 | } |
| 10320 | |
| 10321 | // Keep looking. |
| 10322 | return true; |
| 10323 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10324 | |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10325 | bool isDone() const { return false; } |
| 10326 | }; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10327 | |
| 10328 | } // end anonymous namespace |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10329 | |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10330 | /// Find parametric terms in the given SCEV expression. We look for parameters
 | 10331 | /// in two places:
| 10332 | /// 1) The strides of AddRec expressions. |
| 10333 | /// 2) Unknowns that are multiplied with AddRec expressions. |
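/// For example (an illustrative SCEV, not taken from a specific test): for
/// {%a,+,(8 * %m)}<%loop> the stride (8 * %m) is collected under 1), and for
/// an expression containing %n * {0,+,1}<%loop> the unknown %n is collected
/// under 2).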
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10334 | void ScalarEvolution::collectParametricTerms(const SCEV *Expr, |
| 10335 | SmallVectorImpl<const SCEV *> &Terms) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10336 | SmallVector<const SCEV *, 4> Strides; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10337 | SCEVCollectStrides StrideCollector(*this, Strides); |
| 10338 | visitAll(Expr, StrideCollector); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10339 | |
| 10340 | DEBUG({ |
| 10341 | dbgs() << "Strides:\n"; |
| 10342 | for (const SCEV *S : Strides) |
| 10343 | dbgs() << *S << "\n"; |
| 10344 | }); |
| 10345 | |
| 10346 | for (const SCEV *S : Strides) { |
| 10347 | SCEVCollectTerms TermCollector(Terms); |
| 10348 | visitAll(S, TermCollector); |
| 10349 | } |
| 10350 | |
| 10351 | DEBUG({ |
| 10352 | dbgs() << "Terms:\n"; |
| 10353 | for (const SCEV *T : Terms) |
| 10354 | dbgs() << *T << "\n"; |
| 10355 | }); |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10356 | |
| 10357 | SCEVCollectAddRecMultiplies MulCollector(Terms, *this); |
| 10358 | visitAll(Expr, MulCollector); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10359 | } |
| 10360 | |
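// Helper for findArrayDimensions. A rough sketch of the recursion (the code
// below is authoritative): Terms is expected to be sorted by decreasing number
// of factors; each level divides all terms by the last term, bails out if any
// division leaves a remainder, drops constant factors, and recurses on the
// quotients, recording the divisor used at each level in Sizes.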
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10361 | static bool findArrayDimensionsRec(ScalarEvolution &SE, |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10362 | SmallVectorImpl<const SCEV *> &Terms, |
Sebastian Pop | 47fe7de | 2014-05-09 22:45:07 +0000 | [diff] [blame] | 10363 | SmallVectorImpl<const SCEV *> &Sizes) { |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 10364 | int Last = Terms.size() - 1; |
| 10365 | const SCEV *Step = Terms[Last]; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10366 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10367 | // End of recursion. |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 10368 | if (Last == 0) { |
| 10369 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(Step)) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10370 | SmallVector<const SCEV *, 2> Qs; |
| 10371 | for (const SCEV *Op : M->operands()) |
| 10372 | if (!isa<SCEVConstant>(Op)) |
| 10373 | Qs.push_back(Op); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10374 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 10375 | Step = SE.getMulExpr(Qs); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10376 | } |
| 10377 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 10378 | Sizes.push_back(Step); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10379 | return true; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10380 | } |
| 10381 | |
Benjamin Kramer | 8cff45a | 2014-05-10 17:47:18 +0000 | [diff] [blame] | 10382 | for (const SCEV *&Term : Terms) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10383 | // Normalize the terms before the next call to findArrayDimensionsRec. |
| 10384 | const SCEV *Q, *R; |
David Majnemer | 4e87936 | 2014-12-14 09:12:33 +0000 | [diff] [blame] | 10385 | SCEVDivision::divide(SE, Term, Step, &Q, &R); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10386 | |
| 10387 | // Bail out when GCD does not evenly divide one of the terms. |
| 10388 | if (!R->isZero()) |
| 10389 | return false; |
| 10390 | |
Benjamin Kramer | 8cff45a | 2014-05-10 17:47:18 +0000 | [diff] [blame] | 10391 | Term = Q; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10392 | } |
| 10393 | |
Tobias Grosser | 3080cf1 | 2014-05-08 07:55:34 +0000 | [diff] [blame] | 10394 | // Remove all SCEVConstants. |
David Majnemer | c700490 | 2016-08-12 04:32:37 +0000 | [diff] [blame] | 10395 | Terms.erase( |
| 10396 | remove_if(Terms, [](const SCEV *E) { return isa<SCEVConstant>(E); }), |
| 10397 | Terms.end()); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10398 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10399 | if (Terms.size() > 0) |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10400 | if (!findArrayDimensionsRec(SE, Terms, Sizes)) |
| 10401 | return false; |
| 10402 | |
Sebastian Pop | e30bd35 | 2014-05-27 22:41:56 +0000 | [diff] [blame] | 10403 | Sizes.push_back(Step); |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10404 | return true; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10405 | } |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10406 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10407 | // Returns true when one of the SCEVs of Terms contains a SCEVUnknown parameter. |
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 10408 | static inline bool containsParameters(SmallVectorImpl<const SCEV *> &Terms) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10409 | for (const SCEV *T : Terms) |
Sanjoy Das | 0ae390a | 2016-11-10 06:33:54 +0000 | [diff] [blame] | 10410 | if (SCEVExprContains(T, isa<SCEVUnknown, const SCEV *>)) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10411 | return true; |
| 10412 | return false; |
| 10413 | } |
| 10414 | |
| 10415 | // Return the number of product terms in S. |
| 10416 | static inline int numberOfTerms(const SCEV *S) { |
| 10417 | if (const SCEVMulExpr *Expr = dyn_cast<SCEVMulExpr>(S)) |
| 10418 | return Expr->getNumOperands(); |
| 10419 | return 1; |
| 10420 | } |
| 10421 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10422 | static const SCEV *removeConstantFactors(ScalarEvolution &SE, const SCEV *T) { |
| 10423 | if (isa<SCEVConstant>(T)) |
| 10424 | return nullptr; |
| 10425 | |
| 10426 | if (isa<SCEVUnknown>(T)) |
| 10427 | return T; |
| 10428 | |
| 10429 | if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(T)) { |
| 10430 | SmallVector<const SCEV *, 2> Factors; |
| 10431 | for (const SCEV *Op : M->operands()) |
| 10432 | if (!isa<SCEVConstant>(Op)) |
| 10433 | Factors.push_back(Op); |
| 10434 | |
| 10435 | return SE.getMulExpr(Factors); |
| 10436 | } |
| 10437 | |
| 10438 | return T; |
| 10439 | } |
| 10440 | |
| 10441 | /// Return the size of an element read or written by Inst. |
| 10442 | const SCEV *ScalarEvolution::getElementSize(Instruction *Inst) { |
| 10443 | Type *Ty; |
| 10444 | if (StoreInst *Store = dyn_cast<StoreInst>(Inst)) |
| 10445 | Ty = Store->getValueOperand()->getType(); |
| 10446 | else if (LoadInst *Load = dyn_cast<LoadInst>(Inst)) |
Tobias Grosser | 40ac100 | 2014-06-08 19:21:20 +0000 | [diff] [blame] | 10447 | Ty = Load->getType(); |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10448 | else |
| 10449 | return nullptr; |
| 10450 | |
| 10451 | Type *ETy = getEffectiveSCEVType(PointerType::getUnqual(Ty)); |
| 10452 | return getSizeOfExpr(ETy, Ty); |
| 10453 | } |
| 10454 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10455 | void ScalarEvolution::findArrayDimensions(SmallVectorImpl<const SCEV *> &Terms, |
| 10456 | SmallVectorImpl<const SCEV *> &Sizes, |
Sanjoy Das | df8c2eb | 2017-05-07 05:29:36 +0000 | [diff] [blame] | 10457 | const SCEV *ElementSize) { |
Sebastian Pop | 5352408 | 2014-05-29 19:44:05 +0000 | [diff] [blame] | 10458 | if (Terms.size() < 1 || !ElementSize) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10459 | return; |
| 10460 | |
| 10461 | // Early return when Terms do not contain parameters: we do not delinearize |
 | 10462 |   // non-parametric SCEVs.
| 10463 | if (!containsParameters(Terms)) |
| 10464 | return; |
| 10465 | |
| 10466 | DEBUG({ |
| 10467 | dbgs() << "Terms:\n"; |
| 10468 | for (const SCEV *T : Terms) |
| 10469 | dbgs() << *T << "\n"; |
| 10470 | }); |
| 10471 | |
| 10472 | // Remove duplicates. |
Sanjoy Das | 40415ee | 2017-05-07 05:29:34 +0000 | [diff] [blame] | 10473 | array_pod_sort(Terms.begin(), Terms.end()); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10474 | Terms.erase(std::unique(Terms.begin(), Terms.end()), Terms.end()); |
| 10475 | |
| 10476 | // Put larger terms first. |
| 10477 | std::sort(Terms.begin(), Terms.end(), [](const SCEV *LHS, const SCEV *RHS) { |
| 10478 | return numberOfTerms(LHS) > numberOfTerms(RHS); |
| 10479 | }); |
| 10480 | |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10481 | // Try to divide all terms by the element size. If term is not divisible by |
| 10482 | // element size, proceed with the original term. |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10483 | for (const SCEV *&Term : Terms) { |
| 10484 | const SCEV *Q, *R; |
Sanjoy Das | df8c2eb | 2017-05-07 05:29:36 +0000 | [diff] [blame] | 10485 | SCEVDivision::divide(*this, Term, ElementSize, &Q, &R); |
Tobias Grosser | 374bce0 | 2015-10-12 08:02:00 +0000 | [diff] [blame] | 10486 | if (!Q->isZero()) |
| 10487 | Term = Q; |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10488 | } |
| 10489 | |
| 10490 | SmallVector<const SCEV *, 4> NewTerms; |
| 10491 | |
| 10492 | // Remove constant factors. |
| 10493 | for (const SCEV *T : Terms) |
Sanjoy Das | df8c2eb | 2017-05-07 05:29:36 +0000 | [diff] [blame] | 10494 | if (const SCEV *NewT = removeConstantFactors(*this, T)) |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10495 | NewTerms.push_back(NewT); |
| 10496 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10497 | DEBUG({ |
| 10498 | dbgs() << "Terms after sorting:\n"; |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10499 | for (const SCEV *T : NewTerms) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10500 | dbgs() << *T << "\n"; |
| 10501 | }); |
| 10502 | |
Sanjoy Das | df8c2eb | 2017-05-07 05:29:36 +0000 | [diff] [blame] | 10503 | if (NewTerms.empty() || !findArrayDimensionsRec(*this, NewTerms, Sizes)) { |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10504 | Sizes.clear(); |
| 10505 | return; |
| 10506 | } |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10507 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10508 | // The last element to be pushed into Sizes is the size of an element. |
| 10509 | Sizes.push_back(ElementSize); |
| 10510 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10511 | DEBUG({ |
| 10512 | dbgs() << "Sizes:\n"; |
| 10513 | for (const SCEV *S : Sizes) |
| 10514 | dbgs() << *S << "\n"; |
| 10515 | }); |
| 10516 | } |
| 10517 | |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10518 | void ScalarEvolution::computeAccessFunctions( |
| 10519 | const SCEV *Expr, SmallVectorImpl<const SCEV *> &Subscripts, |
| 10520 | SmallVectorImpl<const SCEV *> &Sizes) { |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10521 | // Early exit in case this SCEV is not an affine multivariate function. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10522 | if (Sizes.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10523 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10524 | |
Sanjoy Das | 1195dbe | 2015-10-08 03:45:58 +0000 | [diff] [blame] | 10525 | if (auto *AR = dyn_cast<SCEVAddRecExpr>(Expr)) |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10526 | if (!AR->isAffine()) |
| 10527 | return; |
| 10528 | |
| 10529 | const SCEV *Res = Expr; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10530 | int Last = Sizes.size() - 1; |
| 10531 | for (int i = Last; i >= 0; i--) { |
| 10532 | const SCEV *Q, *R; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10533 | SCEVDivision::divide(*this, Res, Sizes[i], &Q, &R); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10534 | |
| 10535 | DEBUG({ |
| 10536 | dbgs() << "Res: " << *Res << "\n"; |
| 10537 | dbgs() << "Sizes[i]: " << *Sizes[i] << "\n"; |
| 10538 | dbgs() << "Res divided by Sizes[i]:\n"; |
| 10539 | dbgs() << "Quotient: " << *Q << "\n"; |
| 10540 | dbgs() << "Remainder: " << *R << "\n"; |
| 10541 | }); |
| 10542 | |
| 10543 | Res = Q; |
| 10544 | |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10545 | // Do not record the last subscript corresponding to the size of elements in |
| 10546 | // the array. |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10547 | if (i == Last) { |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10548 | |
| 10549 | // Bail out if the remainder is too complex. |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10550 | if (isa<SCEVAddRecExpr>(R)) { |
| 10551 | Subscripts.clear(); |
| 10552 | Sizes.clear(); |
| 10553 | return; |
| 10554 | } |
Sebastian Pop | a6e5860 | 2014-05-27 22:41:45 +0000 | [diff] [blame] | 10555 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10556 | continue; |
| 10557 | } |
| 10558 | |
| 10559 | // Record the access function for the current subscript. |
| 10560 | Subscripts.push_back(R); |
| 10561 | } |
| 10562 | |
| 10563 | // Also push in last position the remainder of the last division: it will be |
| 10564 | // the access function of the innermost dimension. |
| 10565 | Subscripts.push_back(Res); |
| 10566 | |
| 10567 | std::reverse(Subscripts.begin(), Subscripts.end()); |
| 10568 | |
| 10569 | DEBUG({ |
| 10570 | dbgs() << "Subscripts:\n"; |
| 10571 | for (const SCEV *S : Subscripts) |
| 10572 | dbgs() << *S << "\n"; |
| 10573 | }); |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10574 | } |
| 10575 | |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10576 | /// Splits the SCEV into two vectors of SCEVs representing the subscripts and
 | 10577 | /// sizes of an array access. The SCEV->delinearize algorithm computes the
Sebastian Pop | 7ee1472 | 2013-11-13 22:37:58 +0000 | [diff] [blame] | 10578 | /// multiples of SCEV coefficients: that is a pattern matching of sub
 | 10579 | /// expressions in the stride and base of a SCEV corresponding to the
 | 10580 | /// computation of a GCD (greatest common divisor) of base and stride. When
 | 10581 | /// SCEV->delinearize fails, the output vectors Subscripts and Sizes are left
 | 10582 | /// empty.
| 10583 | /// |
| 10584 | /// For example: when analyzing the memory access A[i][j][k] in this loop nest |
| 10585 | /// |
| 10586 | /// void foo(long n, long m, long o, double A[n][m][o]) { |
| 10587 | /// |
| 10588 | /// for (long i = 0; i < n; i++) |
| 10589 | /// for (long j = 0; j < m; j++) |
| 10590 | /// for (long k = 0; k < o; k++) |
| 10591 | /// A[i][j][k] = 1.0; |
| 10592 | /// } |
| 10593 | /// |
| 10594 | /// the delinearization input is the following AddRec SCEV: |
| 10595 | /// |
| 10596 | /// AddRec: {{{%A,+,(8 * %m * %o)}<%for.i>,+,(8 * %o)}<%for.j>,+,8}<%for.k> |
| 10597 | /// |
| 10598 | /// From this SCEV, we are able to say that the base offset of the access is %A |
| 10599 | /// because it appears as an offset that does not divide any of the strides in |
| 10600 | /// the loops: |
| 10601 | /// |
| 10602 | /// CHECK: Base offset: %A |
| 10603 | /// |
| 10604 | /// and then SCEV->delinearize determines the size of some of the dimensions of |
 | 10605 | /// the array, as these are the factors by which the strides are scaled:
| 10606 | /// |
| 10607 | /// CHECK: ArrayDecl[UnknownSize][%m][%o] with elements of sizeof(double) bytes. |
| 10608 | /// |
| 10609 | /// Note that the outermost dimension remains of UnknownSize because there are |
 | 10610 | /// no strides that would help identify the size of the outermost dimension: when
| 10611 | /// the array has been statically allocated, one could compute the size of that |
| 10612 | /// dimension by dividing the overall size of the array by the size of the known |
| 10613 | /// dimensions: %m * %o * 8. |
| 10614 | /// |
| 10615 | /// Finally delinearize provides the access functions for the array reference |
| 10616 | /// that does correspond to A[i][j][k] of the above C testcase: |
| 10617 | /// |
| 10618 | /// CHECK: ArrayRef[{0,+,1}<%for.i>][{0,+,1}<%for.j>][{0,+,1}<%for.k>] |
| 10619 | /// |
| 10620 | /// The testcases are checking the output of a function pass: |
| 10621 | /// DelinearizationPass that walks through all loads and stores of a function |
| 10622 | /// asking for the SCEV of the memory access with respect to all enclosing |
| 10623 | /// loops, calling SCEV->delinearize on that and printing the results. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10624 | void ScalarEvolution::delinearize(const SCEV *Expr, |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10625 | SmallVectorImpl<const SCEV *> &Subscripts, |
| 10626 | SmallVectorImpl<const SCEV *> &Sizes, |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10627 | const SCEV *ElementSize) { |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10628 | // First step: collect parametric terms. |
| 10629 | SmallVector<const SCEV *, 4> Terms; |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10630 | collectParametricTerms(Expr, Terms); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10631 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10632 | if (Terms.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10633 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10634 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10635 | // Second step: find subscript sizes. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10636 | findArrayDimensions(Terms, Sizes, ElementSize); |
Sebastian Pop | 7ee1472 | 2013-11-13 22:37:58 +0000 | [diff] [blame] | 10637 | |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10638 | if (Sizes.empty()) |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10639 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10640 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10641 | // Third step: compute the access functions for each subscript. |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10642 | computeAccessFunctions(Expr, Subscripts, Sizes); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10643 | |
Sebastian Pop | 28e6b97 | 2014-05-27 22:41:51 +0000 | [diff] [blame] | 10644 | if (Subscripts.empty()) |
| 10645 | return; |
Sebastian Pop | b1a548f | 2014-05-12 19:01:53 +0000 | [diff] [blame] | 10646 | |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10647 | DEBUG({ |
Tobias Grosser | 3cdc37c | 2015-06-29 14:42:48 +0000 | [diff] [blame] | 10648 | dbgs() << "succeeded to delinearize " << *Expr << "\n"; |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10649 | dbgs() << "ArrayDecl[UnknownSize]"; |
| 10650 | for (const SCEV *S : Sizes) |
| 10651 | dbgs() << "[" << *S << "]"; |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10652 | |
Sebastian Pop | 444621a | 2014-05-09 22:45:02 +0000 | [diff] [blame] | 10653 | dbgs() << "\nArrayRef"; |
| 10654 | for (const SCEV *S : Subscripts) |
Sebastian Pop | 448712b | 2014-05-07 18:01:20 +0000 | [diff] [blame] | 10655 | dbgs() << "[" << *S << "]"; |
| 10656 | dbgs() << "\n"; |
| 10657 | }); |
Sebastian Pop | c62c679 | 2013-11-12 22:47:20 +0000 | [diff] [blame] | 10658 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10659 | |
| 10660 | //===----------------------------------------------------------------------===// |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10661 | // SCEVCallbackVH Class Implementation |
| 10662 | //===----------------------------------------------------------------------===// |
| 10663 | |
Dan Gohman | d33a090 | 2009-05-19 19:22:47 +0000 | [diff] [blame] | 10664 | void ScalarEvolution::SCEVCallbackVH::deleted() { |
Dan Gohman | dd707af | 2009-07-13 22:20:53 +0000 | [diff] [blame] | 10665 | assert(SE && "SCEVCallbackVH called with a null ScalarEvolution!"); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10666 | if (PHINode *PN = dyn_cast<PHINode>(getValPtr())) |
| 10667 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 10668 | SE->eraseValueFromMap(getValPtr()); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10669 | // this now dangles! |
| 10670 | } |
| 10671 | |
Dan Gohman | 7a06672 | 2010-07-28 01:09:07 +0000 | [diff] [blame] | 10672 | void ScalarEvolution::SCEVCallbackVH::allUsesReplacedWith(Value *V) { |
Dan Gohman | dd707af | 2009-07-13 22:20:53 +0000 | [diff] [blame] | 10673 | assert(SE && "SCEVCallbackVH called with a null ScalarEvolution!"); |
Eric Christopher | ef6d593 | 2010-07-29 01:25:38 +0000 | [diff] [blame] | 10674 | |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10675 | // Forget all the expressions associated with users of the old value, |
| 10676 | // so that future queries will recompute the expressions using the new |
| 10677 | // value. |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 10678 | Value *Old = getValPtr(); |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 10679 | SmallVector<User *, 16> Worklist(Old->user_begin(), Old->user_end()); |
Dan Gohman | f34f863 | 2009-07-14 14:34:04 +0000 | [diff] [blame] | 10680 | SmallPtrSet<User *, 8> Visited; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10681 | while (!Worklist.empty()) { |
| 10682 | User *U = Worklist.pop_back_val(); |
| 10683 | // Deleting the Old value will cause this to dangle. Postpone |
| 10684 | // that until everything else is done. |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 10685 | if (U == Old) |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10686 | continue; |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 10687 | if (!Visited.insert(U).second) |
Dan Gohman | f34f863 | 2009-07-14 14:34:04 +0000 | [diff] [blame] | 10688 | continue; |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10689 | if (PHINode *PN = dyn_cast<PHINode>(U)) |
| 10690 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 10691 | SE->eraseValueFromMap(U); |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 10692 | Worklist.insert(Worklist.end(), U->user_begin(), U->user_end()); |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10693 | } |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 10694 | // Delete the Old value. |
| 10695 | if (PHINode *PN = dyn_cast<PHINode>(Old)) |
| 10696 | SE->ConstantEvolutionLoopExitValue.erase(PN); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 10697 | SE->eraseValueFromMap(Old); |
Dan Gohman | 8aeb0fb | 2010-07-28 00:28:25 +0000 | [diff] [blame] | 10698 | // this now dangles! |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10699 | } |
| 10700 | |
Dan Gohman | d33a090 | 2009-05-19 19:22:47 +0000 | [diff] [blame] | 10701 | ScalarEvolution::SCEVCallbackVH::SCEVCallbackVH(Value *V, ScalarEvolution *se) |
Dan Gohman | 48f8222 | 2009-05-04 22:30:44 +0000 | [diff] [blame] | 10702 | : CallbackVH(V), SE(se) {} |
| 10703 | |
| 10704 | //===----------------------------------------------------------------------===// |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10705 | // ScalarEvolution Class Implementation |
| 10706 | //===----------------------------------------------------------------------===// |
| 10707 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10708 | ScalarEvolution::ScalarEvolution(Function &F, TargetLibraryInfo &TLI, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 10709 | AssumptionCache &AC, DominatorTree &DT, |
| 10710 | LoopInfo &LI) |
| 10711 | : F(F), TLI(TLI), AC(AC), DT(DT), LI(LI), |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 10712 | CouldNotCompute(new SCEVCouldNotCompute()), ValuesAtScopes(64), |
| 10713 | LoopDispositions(64), BlockDispositions(64) { |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 10714 | // To use guards for proving predicates, we need to scan every instruction in |
| 10715 | // relevant basic blocks, and not just terminators. Doing this is a waste of |
| 10716 | // time if the IR does not actually contain any calls to |
| 10717 | // @llvm.experimental.guard, so do a quick check and remember this beforehand. |
| 10718 | // |
| 10719 | // This pessimizes the case where a pass that preserves ScalarEvolution wants |
| 10720 | // to _add_ guards to the module when there weren't any before, and wants |
| 10721 | // ScalarEvolution to optimize based on those guards. For now we prefer to be |
| 10722 | // efficient in lieu of being smart in that rather obscure case. |
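  // An example of the kind of call this is looking for (illustrative IR; see
  // the LLVM LangRef for the exact form):
  //
  //   call void (i1, ...) @llvm.experimental.guard(i1 %cond) [ "deopt"() ]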
| 10723 | |
| 10724 | auto *GuardDecl = F.getParent()->getFunction( |
| 10725 | Intrinsic::getName(Intrinsic::experimental_guard)); |
| 10726 | HasGuards = GuardDecl && !GuardDecl->use_empty(); |
| 10727 | } |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10728 | |
| 10729 | ScalarEvolution::ScalarEvolution(ScalarEvolution &&Arg) |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 10730 | : F(Arg.F), HasGuards(Arg.HasGuards), TLI(Arg.TLI), AC(Arg.AC), DT(Arg.DT), |
Sanjoy Das | 2512d0c | 2016-05-10 00:31:49 +0000 | [diff] [blame] | 10731 | LI(Arg.LI), CouldNotCompute(std::move(Arg.CouldNotCompute)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10732 | ValueExprMap(std::move(Arg.ValueExprMap)), |
Sanjoy Das | db93375 | 2016-09-27 18:01:38 +0000 | [diff] [blame] | 10733 | PendingLoopPredicates(std::move(Arg.PendingLoopPredicates)), |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 10734 | MinTrailingZerosCache(std::move(Arg.MinTrailingZerosCache)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10735 | BackedgeTakenCounts(std::move(Arg.BackedgeTakenCounts)), |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 10736 | PredicatedBackedgeTakenCounts( |
| 10737 | std::move(Arg.PredicatedBackedgeTakenCounts)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10738 | ConstantEvolutionLoopExitValue( |
| 10739 | std::move(Arg.ConstantEvolutionLoopExitValue)), |
| 10740 | ValuesAtScopes(std::move(Arg.ValuesAtScopes)), |
| 10741 | LoopDispositions(std::move(Arg.LoopDispositions)), |
Sanjoy Das | 5cb11b6 | 2016-09-26 02:44:10 +0000 | [diff] [blame] | 10742 | LoopPropertiesCache(std::move(Arg.LoopPropertiesCache)), |
Chandler Carruth | 68abda5 | 2016-09-26 04:49:58 +0000 | [diff] [blame] | 10743 | BlockDispositions(std::move(Arg.BlockDispositions)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10744 | UnsignedRanges(std::move(Arg.UnsignedRanges)), |
| 10745 | SignedRanges(std::move(Arg.SignedRanges)), |
| 10746 | UniqueSCEVs(std::move(Arg.UniqueSCEVs)), |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 10747 | UniquePreds(std::move(Arg.UniquePreds)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10748 | SCEVAllocator(std::move(Arg.SCEVAllocator)), |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 10749 | LoopUsers(std::move(Arg.LoopUsers)), |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 10750 | PredicatedSCEVRewrites(std::move(Arg.PredicatedSCEVRewrites)), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10751 | FirstUnknown(Arg.FirstUnknown) { |
| 10752 | Arg.FirstUnknown = nullptr; |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 10753 | } |
| 10754 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10755 | ScalarEvolution::~ScalarEvolution() { |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 10756 | // Iterate through all the SCEVUnknown instances and call their |
| 10757 | // destructors, so that they release their references to their values. |
Naomi Musgrave | f90c1be | 2015-09-16 23:46:40 +0000 | [diff] [blame] | 10758 | for (SCEVUnknown *U = FirstUnknown; U;) { |
| 10759 | SCEVUnknown *Tmp = U; |
| 10760 | U = U->Next; |
| 10761 | Tmp->~SCEVUnknown(); |
| 10762 | } |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 10763 | FirstUnknown = nullptr; |
Dan Gohman | 7cac957 | 2010-08-02 23:49:30 +0000 | [diff] [blame] | 10764 | |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 10765 | ExprValueMap.clear(); |
Dan Gohman | 9bad2fb | 2010-08-27 18:55:03 +0000 | [diff] [blame] | 10766 | ValueExprMap.clear(); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 10767 | HasRecMap.clear(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 10768 | |
| 10769 | // Free any extra memory created for ExitNotTakenInfo in the unlikely event |
| 10770 | // that a loop had multiple computable exits. |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 10771 | for (auto &BTCI : BackedgeTakenCounts) |
| 10772 | BTCI.second.clear(); |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 10773 | for (auto &BTCI : PredicatedBackedgeTakenCounts) |
| 10774 | BTCI.second.clear(); |
Andrew Trick | 3ca3f98 | 2011-07-26 17:19:55 +0000 | [diff] [blame] | 10775 | |
Andrew Trick | 7fa4e0f | 2012-05-19 00:48:25 +0000 | [diff] [blame] | 10776 | assert(PendingLoopPredicates.empty() && "isImpliedCond garbage"); |
Sanjoy Das | b864c1f | 2015-04-01 18:24:06 +0000 | [diff] [blame] | 10777 | assert(!WalkingBEDominatingConds && "isLoopBackedgeGuardedByCond garbage!"); |
Sanjoy Das | 7d910f2 | 2015-10-02 18:50:30 +0000 | [diff] [blame] | 10778 | assert(!ProvingSplitPredicate && "ProvingSplitPredicate garbage!"); |
Dan Gohman | 0a40ad9 | 2009-04-16 03:18:22 +0000 | [diff] [blame] | 10779 | } |
| 10780 | |
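// Return true if SCEV can compute a backedge-taken count for L, i.e.
// getBackedgeTakenCount(L) does not return SCEVCouldNotCompute.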
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 10781 | bool ScalarEvolution::hasLoopInvariantBackedgeTakenCount(const Loop *L) { |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 10782 | return !isa<SCEVCouldNotCompute>(getBackedgeTakenCount(L)); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10783 | } |
| 10784 | |
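// Print the subloops of L first, then L's backedge-taken count, max
// backedge-taken count, predicated backedge-taken count and, when the exact
// count is known, the trip multiple.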
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 10785 | static void PrintLoopInfo(raw_ostream &OS, ScalarEvolution *SE, |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10786 | const Loop *L) { |
| 10787 | // Print all inner loops first |
Benjamin Kramer | aa20915 | 2016-06-26 17:27:42 +0000 | [diff] [blame] | 10788 | for (Loop *I : *L) |
| 10789 | PrintLoopInfo(OS, SE, I); |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10790 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10791 | OS << "Loop "; |
Chandler Carruth | d48cdbf | 2014-01-09 02:29:41 +0000 | [diff] [blame] | 10792 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10793 | OS << ": "; |
Chris Lattner | d72c3eb | 2004-04-18 22:14:10 +0000 | [diff] [blame] | 10794 | |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 10795 | SmallVector<BasicBlock *, 8> ExitBlocks; |
Chris Lattner | d72c3eb | 2004-04-18 22:14:10 +0000 | [diff] [blame] | 10796 | L->getExitBlocks(ExitBlocks); |
| 10797 | if (ExitBlocks.size() != 1) |
Nick Lewycky | d1200b0 | 2008-01-02 02:49:20 +0000 | [diff] [blame] | 10798 | OS << "<multiple exits> "; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10799 | |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 10800 | if (SE->hasLoopInvariantBackedgeTakenCount(L)) { |
| 10801 | OS << "backedge-taken count is " << *SE->getBackedgeTakenCount(L); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10802 | } else { |
Dan Gohman | 0bddac1 | 2009-02-24 18:55:53 +0000 | [diff] [blame] | 10803 | OS << "Unpredictable backedge-taken count. "; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10804 | } |
| 10805 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10806 | OS << "\n" |
| 10807 | "Loop "; |
Chandler Carruth | d48cdbf | 2014-01-09 02:29:41 +0000 | [diff] [blame] | 10808 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10809 | OS << ": "; |
Dan Gohman | 6994293 | 2009-06-24 00:33:16 +0000 | [diff] [blame] | 10810 | |
| 10811 | if (!isa<SCEVCouldNotCompute>(SE->getMaxBackedgeTakenCount(L))) { |
| 10812 | OS << "max backedge-taken count is " << *SE->getMaxBackedgeTakenCount(L); |
John Brawn | 84b2183 | 2016-10-21 11:08:48 +0000 | [diff] [blame] | 10813 | if (SE->isBackedgeTakenCountMaxOrZero(L)) |
| 10814 |       OS << ", actual taken count is either this or zero.";
Dan Gohman | 6994293 | 2009-06-24 00:33:16 +0000 | [diff] [blame] | 10815 | } else { |
| 10816 | OS << "Unpredictable max backedge-taken count. "; |
| 10817 | } |
| 10818 | |
Silviu Baranga | 6f444df | 2016-04-08 14:29:09 +0000 | [diff] [blame] | 10819 | OS << "\n" |
| 10820 | "Loop "; |
| 10821 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
| 10822 | OS << ": "; |
| 10823 | |
| 10824 | SCEVUnionPredicate Pred; |
| 10825 | auto PBT = SE->getPredicatedBackedgeTakenCount(L, Pred); |
| 10826 | if (!isa<SCEVCouldNotCompute>(PBT)) { |
| 10827 | OS << "Predicated backedge-taken count is " << *PBT << "\n"; |
| 10828 | OS << " Predicates:\n"; |
| 10829 | Pred.print(OS, 4); |
| 10830 | } else { |
| 10831 | OS << "Unpredictable predicated backedge-taken count. "; |
| 10832 | } |
Dan Gohman | 6994293 | 2009-06-24 00:33:16 +0000 | [diff] [blame] | 10833 | OS << "\n"; |
Eli Friedman | b1578d3 | 2017-03-20 20:25:46 +0000 | [diff] [blame] | 10834 | |
| 10835 | if (SE->hasLoopInvariantBackedgeTakenCount(L)) { |
| 10836 | OS << "Loop "; |
| 10837 | L->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
| 10838 | OS << ": "; |
| 10839 | OS << "Trip multiple is " << SE->getSmallConstantTripMultiple(L) << "\n"; |
| 10840 | } |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10841 | } |
| 10842 | |
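// Render a LoopDisposition as the short string used in the printed analysis
// output below.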
Sanjoy Das | f2f00fb1 | 2016-05-01 04:51:05 +0000 | [diff] [blame] | 10843 | static StringRef loopDispositionToStr(ScalarEvolution::LoopDisposition LD) { |
| 10844 | switch (LD) { |
| 10845 | case ScalarEvolution::LoopVariant: |
| 10846 | return "Variant"; |
| 10847 | case ScalarEvolution::LoopInvariant: |
| 10848 | return "Invariant"; |
| 10849 | case ScalarEvolution::LoopComputable: |
| 10850 | return "Computable"; |
| 10851 | } |
Simon Pilgrim | 33ae13d | 2016-05-01 15:52:31 +0000 | [diff] [blame] | 10852 | llvm_unreachable("Unknown ScalarEvolution::LoopDisposition kind!"); |
Sanjoy Das | f2f00fb1 | 2016-05-01 04:51:05 +0000 | [diff] [blame] | 10853 | } |
| 10854 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10855 | void ScalarEvolution::print(raw_ostream &OS) const { |
Dan Gohman | 8b0a419 | 2010-03-01 17:49:51 +0000 | [diff] [blame] | 10856 | // ScalarEvolution's implementation of the print method is to print |
Dan Gohman | c8e2362 | 2009-04-21 23:15:49 +0000 | [diff] [blame] | 10857 | // out SCEV values of all instructions that are interesting. Doing |
| 10858 | // this potentially causes it to create new SCEV objects though, |
| 10859 | // which technically conflicts with the const qualifier. This isn't |
Dan Gohman | 028e615 | 2009-07-10 20:25:29 +0000 | [diff] [blame] | 10860 | // observable from outside the class though, so casting away the |
| 10861 | // const isn't dangerous. |
Dan Gohman | cb0efec | 2009-12-18 01:14:11 +0000 | [diff] [blame] | 10862 | ScalarEvolution &SE = *const_cast<ScalarEvolution *>(this); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10863 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10864 | OS << "Classifying expressions for: "; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10865 | F.printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10866 | OS << "\n"; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 10867 | for (Instruction &I : instructions(F)) |
| 10868 | if (isSCEVable(I.getType()) && !isa<CmpInst>(I)) { |
| 10869 | OS << I << '\n'; |
Dan Gohman | 81313fd | 2008-09-14 17:21:12 +0000 | [diff] [blame] | 10870 | OS << " --> "; |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 10871 | const SCEV *SV = SE.getSCEV(&I); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10872 | SV->print(OS); |
Sanjoy Das | f257452 | 2015-03-09 21:43:39 +0000 | [diff] [blame] | 10873 | if (!isa<SCEVCouldNotCompute>(SV)) { |
| 10874 | OS << " U: "; |
| 10875 | SE.getUnsignedRange(SV).print(OS); |
| 10876 | OS << " S: "; |
| 10877 | SE.getSignedRange(SV).print(OS); |
| 10878 | } |
Misha Brukman | 01808ca | 2005-04-21 21:13:18 +0000 | [diff] [blame] | 10879 | |
Sanjoy Das | d9f6d33 | 2015-10-18 00:29:16 +0000 | [diff] [blame] | 10880 | const Loop *L = LI.getLoopFor(I.getParent()); |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 10881 | |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 10882 | const SCEV *AtUse = SE.getSCEVAtScope(SV, L); |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 10883 | if (AtUse != SV) { |
| 10884 | OS << " --> "; |
| 10885 | AtUse->print(OS); |
Sanjoy Das | f257452 | 2015-03-09 21:43:39 +0000 | [diff] [blame] | 10886 | if (!isa<SCEVCouldNotCompute>(AtUse)) { |
| 10887 | OS << " U: "; |
| 10888 | SE.getUnsignedRange(AtUse).print(OS); |
| 10889 | OS << " S: "; |
| 10890 | SE.getSignedRange(AtUse).print(OS); |
| 10891 | } |
Dan Gohman | b9063a8 | 2009-06-19 17:49:54 +0000 | [diff] [blame] | 10892 | } |
| 10893 | |
| 10894 | if (L) { |
Dan Gohman | 94c468f | 2009-06-18 00:37:45 +0000 | [diff] [blame] | 10895 | OS << "\t\t" "Exits: "; |
Dan Gohman | af75234 | 2009-07-07 17:06:11 +0000 | [diff] [blame] | 10896 | const SCEV *ExitValue = SE.getSCEVAtScope(SV, L->getParentLoop()); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10897 | if (!SE.isLoopInvariant(ExitValue, L)) { |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10898 | OS << "<<Unknown>>"; |
| 10899 | } else { |
| 10900 | OS << *ExitValue; |
| 10901 | } |
Sanjoy Das | f2f00fb1 | 2016-05-01 04:51:05 +0000 | [diff] [blame] | 10902 | |
| 10903 | bool First = true; |
| 10904 | for (auto *Iter = L; Iter; Iter = Iter->getParentLoop()) { |
| 10905 | if (First) { |
Sanjoy Das | 013a4ac | 2016-05-03 17:49:57 +0000 | [diff] [blame] | 10906 | OS << "\t\t" "LoopDispositions: { "; |
Sanjoy Das | f2f00fb1 | 2016-05-01 04:51:05 +0000 | [diff] [blame] | 10907 | First = false; |
| 10908 | } else { |
| 10909 | OS << ", "; |
| 10910 | } |
| 10911 | |
Sanjoy Das | 013a4ac | 2016-05-03 17:49:57 +0000 | [diff] [blame] | 10912 | Iter->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
| 10913 | OS << ": " << loopDispositionToStr(SE.getLoopDisposition(SV, Iter)); |
Sanjoy Das | f2f00fb1 | 2016-05-01 04:51:05 +0000 | [diff] [blame] | 10914 | } |
| 10915 | |
Sanjoy Das | 013a4ac | 2016-05-03 17:49:57 +0000 | [diff] [blame] | 10916 | for (auto *InnerL : depth_first(L)) { |
| 10917 | if (InnerL == L) |
| 10918 | continue; |
| 10919 | if (First) { |
| 10920 | OS << "\t\t" "LoopDispositions: { "; |
| 10921 | First = false; |
| 10922 | } else { |
| 10923 | OS << ", "; |
| 10924 | } |
| 10925 | |
| 10926 | InnerL->getHeader()->printAsOperand(OS, /*PrintType=*/false); |
| 10927 | OS << ": " << loopDispositionToStr(SE.getLoopDisposition(SV, InnerL)); |
| 10928 | } |
| 10929 | |
| 10930 | OS << " }"; |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10931 | } |
| 10932 | |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10933 | OS << "\n"; |
| 10934 | } |
| 10935 | |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10936 | OS << "Determining loop execution counts for: "; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 10937 | F.printAsOperand(OS, /*PrintType=*/false); |
Dan Gohman | bc69491 | 2010-01-09 18:17:45 +0000 | [diff] [blame] | 10938 | OS << "\n"; |
Benjamin Kramer | aa20915 | 2016-06-26 17:27:42 +0000 | [diff] [blame] | 10939 | for (Loop *I : LI) |
| 10940 | PrintLoopInfo(OS, &SE, I); |
Chris Lattner | d934c70 | 2004-04-02 20:23:17 +0000 | [diff] [blame] | 10941 | } |
Dan Gohman | e20f824 | 2009-04-21 00:47:46 +0000 | [diff] [blame] | 10942 | |
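// Cached wrapper around computeLoopDisposition. A conservative placeholder
// (LoopVariant) is recorded first; after the computation, the cache is
// re-queried (recursive calls may have grown the underlying vector and
// invalidated references) and the placeholder is overwritten with the result.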
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10943 | ScalarEvolution::LoopDisposition |
| 10944 | ScalarEvolution::getLoopDisposition(const SCEV *S, const Loop *L) { |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 10945 | auto &Values = LoopDispositions[S]; |
| 10946 | for (auto &V : Values) { |
| 10947 | if (V.getPointer() == L) |
| 10948 | return V.getInt(); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 10949 | } |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 10950 | Values.emplace_back(L, LoopVariant); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10951 | LoopDisposition D = computeLoopDisposition(S, L); |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 10952 | auto &Values2 = LoopDispositions[S]; |
| 10953 | for (auto &V : make_range(Values2.rbegin(), Values2.rend())) { |
| 10954 | if (V.getPointer() == L) { |
| 10955 | V.setInt(D); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 10956 | break; |
| 10957 | } |
| 10958 | } |
| 10959 | return D; |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10960 | } |
| 10961 | |
| 10962 | ScalarEvolution::LoopDisposition |
| 10963 | ScalarEvolution::computeLoopDisposition(const SCEV *S, const Loop *L) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 10964 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10965 | case scConstant: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10966 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10967 | case scTruncate: |
| 10968 | case scZeroExtend: |
| 10969 | case scSignExtend: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10970 | return getLoopDisposition(cast<SCEVCastExpr>(S)->getOperand(), L); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10971 | case scAddRecExpr: { |
| 10972 | const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(S); |
| 10973 | |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10974 | // If L is the addrec's loop, it's computable. |
| 10975 | if (AR->getLoop() == L) |
| 10976 | return LoopComputable; |
| 10977 | |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10978 | // Add recurrences are never invariant in the function-body (null loop). |
| 10979 | if (!L) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10980 | return LoopVariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10981 | |
Max Kazantsev | 23044fa | 2017-11-22 06:21:39 +0000 | [diff] [blame] | 10982 | // Everything that is not defined at loop entry is variant. |
| 10983 | if (DT.dominates(L->getHeader(), AR->getLoop()->getHeader())) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10984 | return LoopVariant; |
Max Kazantsev | 23044fa | 2017-11-22 06:21:39 +0000 | [diff] [blame] | 10985 | assert(!L->contains(AR->getLoop()) && "Containing loop's header does not" |
| 10986 | " dominate the contained loop's header?"); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10987 | |
| 10988 | // This recurrence is invariant w.r.t. L if AR's loop contains L. |
| 10989 | if (AR->getLoop()->contains(L)) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10990 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10991 | |
| 10992 | // This recurrence is variant w.r.t. L if any of its operands |
| 10993 | // are variant. |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 10994 | for (auto *Op : AR->operands()) |
| 10995 | if (!isLoopInvariant(Op, L)) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10996 | return LoopVariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 10997 | |
| 10998 | // Otherwise it's loop-invariant. |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 10999 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11000 | } |
| 11001 | case scAddExpr: |
| 11002 | case scMulExpr: |
| 11003 | case scUMaxExpr: |
| 11004 | case scSMaxExpr: { |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11005 | bool HasVarying = false; |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 11006 | for (auto *Op : cast<SCEVNAryExpr>(S)->operands()) { |
| 11007 | LoopDisposition D = getLoopDisposition(Op, L); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11008 | if (D == LoopVariant) |
| 11009 | return LoopVariant; |
| 11010 | if (D == LoopComputable) |
| 11011 | HasVarying = true; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11012 | } |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11013 | return HasVarying ? LoopComputable : LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11014 | } |
| 11015 | case scUDivExpr: { |
| 11016 | const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(S); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11017 | LoopDisposition LD = getLoopDisposition(UDiv->getLHS(), L); |
| 11018 | if (LD == LoopVariant) |
| 11019 | return LoopVariant; |
| 11020 | LoopDisposition RD = getLoopDisposition(UDiv->getRHS(), L); |
| 11021 | if (RD == LoopVariant) |
| 11022 | return LoopVariant; |
| 11023 | return (LD == LoopInvariant && RD == LoopInvariant) ? |
| 11024 | LoopInvariant : LoopComputable; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11025 | } |
| 11026 | case scUnknown: |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11027 | // All non-instruction values are loop invariant. All instructions are loop |
| 11028 | // invariant if they are not contained in the specified loop. |
| 11029 | // Instructions are never considered invariant in the function body |
| 11030 | // (null loop) because they are defined within the "loop". |
Sanjoy Das | 0194743 | 2015-11-22 21:20:13 +0000 | [diff] [blame] | 11031 | if (auto *I = dyn_cast<Instruction>(cast<SCEVUnknown>(S)->getValue())) |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11032 | return (L && !L->contains(I)) ? LoopInvariant : LoopVariant; |
| 11033 | return LoopInvariant; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11034 | case scCouldNotCompute: |
| 11035 | llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11036 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 11037 | llvm_unreachable("Unknown SCEV kind!"); |
Dan Gohman | 7ee1bbb | 2010-11-17 23:21:44 +0000 | [diff] [blame] | 11038 | } |
| 11039 | |
| 11040 | bool ScalarEvolution::isLoopInvariant(const SCEV *S, const Loop *L) { |
| 11041 | return getLoopDisposition(S, L) == LoopInvariant; |
| 11042 | } |
| 11043 | |
| 11044 | bool ScalarEvolution::hasComputableLoopEvolution(const SCEV *S, const Loop *L) { |
| 11045 | return getLoopDisposition(S, L) == LoopComputable; |
Dan Gohman | afd6db9 | 2010-11-17 21:23:15 +0000 | [diff] [blame] | 11046 | } |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11047 | |
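// Cached wrapper around computeBlockDisposition, using the same
// seed-then-update caching scheme as getLoopDisposition above.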
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11048 | ScalarEvolution::BlockDisposition |
| 11049 | ScalarEvolution::getBlockDisposition(const SCEV *S, const BasicBlock *BB) { |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 11050 | auto &Values = BlockDispositions[S]; |
| 11051 | for (auto &V : Values) { |
| 11052 | if (V.getPointer() == BB) |
| 11053 | return V.getInt(); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 11054 | } |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 11055 | Values.emplace_back(BB, DoesNotDominateBlock); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11056 | BlockDisposition D = computeBlockDisposition(S, BB); |
Benjamin Kramer | d7e331e | 2015-02-07 16:41:12 +0000 | [diff] [blame] | 11057 | auto &Values2 = BlockDispositions[S]; |
| 11058 | for (auto &V : make_range(Values2.rbegin(), Values2.rend())) { |
| 11059 | if (V.getPointer() == BB) { |
| 11060 | V.setInt(D); |
Wan Xiaofei | b2c8cdc | 2013-11-12 09:40:41 +0000 | [diff] [blame] | 11061 | break; |
| 11062 | } |
| 11063 | } |
| 11064 | return D; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11065 | } |
| 11066 | |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11067 | ScalarEvolution::BlockDisposition |
| 11068 | ScalarEvolution::computeBlockDisposition(const SCEV *S, const BasicBlock *BB) { |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 11069 | switch (static_cast<SCEVTypes>(S->getSCEVType())) { |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11070 | case scConstant: |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11071 | return ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11072 | case scTruncate: |
| 11073 | case scZeroExtend: |
| 11074 | case scSignExtend: |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11075 | return getBlockDisposition(cast<SCEVCastExpr>(S)->getOperand(), BB); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11076 | case scAddRecExpr: { |
| 11077 |     // This uses a "dominates" query instead of a "properly dominates" query
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11078 | // to test for proper dominance too, because the instruction which |
| 11079 | // produces the addrec's value is a PHI, and a PHI effectively properly |
| 11080 | // dominates its entire containing block. |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11081 | const SCEVAddRecExpr *AR = cast<SCEVAddRecExpr>(S); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11082 | if (!DT.dominates(AR->getLoop()->getHeader(), BB)) |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11083 | return DoesNotDominateBlock; |
Justin Bogner | cd1d5aa | 2016-08-17 20:30:52 +0000 | [diff] [blame] | 11084 | |
| 11085 | // Fall through into SCEVNAryExpr handling. |
| 11086 | LLVM_FALLTHROUGH; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11087 | } |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11088 | case scAddExpr: |
| 11089 | case scMulExpr: |
| 11090 | case scUMaxExpr: |
| 11091 | case scSMaxExpr: { |
| 11092 | const SCEVNAryExpr *NAry = cast<SCEVNAryExpr>(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11093 | bool Proper = true; |
Sanjoy Das | d87e435 | 2015-12-08 22:53:36 +0000 | [diff] [blame] | 11094 | for (const SCEV *NAryOp : NAry->operands()) { |
| 11095 | BlockDisposition D = getBlockDisposition(NAryOp, BB); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11096 | if (D == DoesNotDominateBlock) |
| 11097 | return DoesNotDominateBlock; |
| 11098 | if (D == DominatesBlock) |
| 11099 | Proper = false; |
| 11100 | } |
| 11101 | return Proper ? ProperlyDominatesBlock : DominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11102 | } |
| 11103 | case scUDivExpr: { |
| 11104 | const SCEVUDivExpr *UDiv = cast<SCEVUDivExpr>(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11105 | const SCEV *LHS = UDiv->getLHS(), *RHS = UDiv->getRHS(); |
| 11106 | BlockDisposition LD = getBlockDisposition(LHS, BB); |
| 11107 | if (LD == DoesNotDominateBlock) |
| 11108 | return DoesNotDominateBlock; |
| 11109 | BlockDisposition RD = getBlockDisposition(RHS, BB); |
| 11110 | if (RD == DoesNotDominateBlock) |
| 11111 | return DoesNotDominateBlock; |
| 11112 | return (LD == ProperlyDominatesBlock && RD == ProperlyDominatesBlock) ? |
| 11113 | ProperlyDominatesBlock : DominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11114 | } |
| 11115 | case scUnknown: |
| 11116 | if (Instruction *I = |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11117 | dyn_cast<Instruction>(cast<SCEVUnknown>(S)->getValue())) { |
| 11118 | if (I->getParent() == BB) |
| 11119 | return DominatesBlock; |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11120 | if (DT.properlyDominates(I->getParent(), BB)) |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11121 | return ProperlyDominatesBlock; |
| 11122 | return DoesNotDominateBlock; |
| 11123 | } |
| 11124 | return ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11125 | case scCouldNotCompute: |
| 11126 | llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11127 | } |
Benjamin Kramer | 987b850 | 2014-02-11 19:02:55 +0000 | [diff] [blame] | 11128 | llvm_unreachable("Unknown SCEV kind!"); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11129 | } |
| 11130 | |
| 11131 | bool ScalarEvolution::dominates(const SCEV *S, const BasicBlock *BB) { |
| 11132 | return getBlockDisposition(S, BB) >= DominatesBlock; |
| 11133 | } |
| 11134 | |
| 11135 | bool ScalarEvolution::properlyDominates(const SCEV *S, const BasicBlock *BB) { |
| 11136 | return getBlockDisposition(S, BB) == ProperlyDominatesBlock; |
Dan Gohman | 20d9ce2 | 2010-11-17 21:41:58 +0000 | [diff] [blame] | 11137 | } |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 11138 | |
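// Return true if Op occurs anywhere inside the expression tree of S
// (including S itself).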
| 11139 | bool ScalarEvolution::hasOperand(const SCEV *S, const SCEV *Op) const { |
Sanjoy Das | 6b46a0d | 2016-11-09 18:22:43 +0000 | [diff] [blame] | 11140 | return SCEVExprContains(S, [&](const SCEV *Expr) { return Expr == Op; }); |
Dan Gohman | 534749b | 2010-11-17 22:27:42 +0000 | [diff] [blame] | 11141 | } |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 11142 | |
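// Return true if S occurs in either the exact or the max not-taken count of
// this exit; counts that are SCEVCouldNotCompute are ignored.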
Max Kazantsev | 2cb3653 | 2017-08-03 08:41:30 +0000 | [diff] [blame] | 11143 | bool ScalarEvolution::ExitLimit::hasOperand(const SCEV *S) const { |
| 11144 | auto IsS = [&](const SCEV *X) { return S == X; }; |
| 11145 | auto ContainsS = [&](const SCEV *X) { |
| 11146 | return !isa<SCEVCouldNotCompute>(X) && SCEVExprContains(X, IsS); |
| 11147 | }; |
| 11148 | return ContainsS(ExactNotTaken) || ContainsS(MaxNotTaken); |
| 11149 | } |
| 11150 | |
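// Drop every piece of cached information keyed on S: values at scopes,
// dispositions, ranges, value mappings, trailing-zero counts, predicated
// rewrites, and any backedge-taken-count info whose computation used S.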
| 11151 | void |
Sanjoy Das | 7e36337 | 2017-12-04 19:22:00 +0000 | [diff] [blame] | 11152 | ScalarEvolution::forgetMemoizedResults(const SCEV *S) { |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 11153 | ValuesAtScopes.erase(S); |
| 11154 | LoopDispositions.erase(S); |
Dan Gohman | 8ea83d8 | 2010-11-18 00:34:22 +0000 | [diff] [blame] | 11155 | BlockDispositions.erase(S); |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 11156 | UnsignedRanges.erase(S); |
| 11157 | SignedRanges.erase(S); |
Wei Mi | a49559b | 2016-02-04 01:27:38 +0000 | [diff] [blame] | 11158 | ExprValueMap.erase(S); |
| 11159 | HasRecMap.erase(S); |
Igor Laevsky | c11c1ed | 2017-02-14 15:53:12 +0000 | [diff] [blame] | 11160 | MinTrailingZerosCache.erase(S); |
Andrew Trick | 9093e15 | 2013-03-26 03:14:53 +0000 | [diff] [blame] | 11161 | |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 11162 | for (auto I = PredicatedSCEVRewrites.begin(); |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11163 | I != PredicatedSCEVRewrites.end();) { |
| 11164 | std::pair<const SCEV *, const Loop *> Entry = I->first; |
| 11165 | if (Entry.first == S) |
| 11166 | PredicatedSCEVRewrites.erase(I++); |
| 11167 | else |
| 11168 | ++I; |
| 11169 | } |
| 11170 | |
Sanjoy Das | 3a5e252 | 2017-10-17 01:03:56 +0000 | [diff] [blame] | 11171 | auto RemoveSCEVFromBackedgeMap = |
| 11172 | [S, this](DenseMap<const Loop *, BackedgeTakenInfo> &Map) { |
| 11173 | for (auto I = Map.begin(), E = Map.end(); I != E;) { |
| 11174 | BackedgeTakenInfo &BEInfo = I->second; |
| 11175 | if (BEInfo.hasOperand(S, this)) { |
| 11176 | BEInfo.clear(); |
| 11177 | Map.erase(I++); |
| 11178 | } else |
| 11179 | ++I; |
| 11180 | } |
| 11181 | }; |
| 11182 | |
| 11183 | RemoveSCEVFromBackedgeMap(BackedgeTakenCounts); |
| 11184 | RemoveSCEVFromBackedgeMap(PredicatedBackedgeTakenCounts); |
Dan Gohman | 7e6b393 | 2010-11-17 23:28:48 +0000 | [diff] [blame] | 11185 | } |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11186 | |
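// Walk S and record it in LoopUsers[L] for every loop L that appears as the
// loop of an add-recurrence inside S.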
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 11187 | void ScalarEvolution::addToLoopUseLists(const SCEV *S) { |
Sanjoy Das | 3a5e252 | 2017-10-17 01:03:56 +0000 | [diff] [blame] | 11188 | struct FindUsedLoops { |
| 11189 | SmallPtrSet<const Loop *, 8> LoopsUsed; |
| 11190 | bool follow(const SCEV *S) { |
| 11191 | if (auto *AR = dyn_cast<SCEVAddRecExpr>(S)) |
| 11192 | LoopsUsed.insert(AR->getLoop()); |
| 11193 | return true; |
| 11194 | } |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 11195 | |
Sanjoy Das | 3a5e252 | 2017-10-17 01:03:56 +0000 | [diff] [blame] | 11196 | bool isDone() const { return false; } |
| 11197 | }; |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 11198 | |
Sanjoy Das | 3a5e252 | 2017-10-17 01:03:56 +0000 | [diff] [blame] | 11199 | FindUsedLoops F; |
| 11200 | SCEVTraversal<FindUsedLoops>(F).visitAll(S); |
| 11201 | |
| 11202 | for (auto *L : F.LoopsUsed) |
| 11203 | LoopUsers[L].push_back(S); |
Sanjoy Das | e6b995f | 2017-10-13 05:50:52 +0000 | [diff] [blame] | 11204 | } |
| 11205 | |
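// Cross-check the cached backedge-taken counts: recompute them with a fresh
// ScalarEvolution instance, map the old expressions into that instance, and
// abort if the old and new counts differ by a nonzero constant.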
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11206 | void ScalarEvolution::verify() const { |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11207 | ScalarEvolution &SE = *const_cast<ScalarEvolution *>(this); |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 11208 | ScalarEvolution SE2(F, TLI, AC, DT, LI); |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11209 | |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11210 | SmallVector<Loop *, 8> LoopStack(LI.begin(), LI.end()); |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11211 | |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11212 |   // Maps SCEV expressions from one ScalarEvolution "universe" to another.
| 11213 | struct SCEVMapper : public SCEVRewriteVisitor<SCEVMapper> { |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 11214 | SCEVMapper(ScalarEvolution &SE) : SCEVRewriteVisitor<SCEVMapper>(SE) {} |
| 11215 | |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11216 | const SCEV *visitConstant(const SCEVConstant *Constant) { |
| 11217 | return SE.getConstant(Constant->getAPInt()); |
| 11218 | } |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 11219 | |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11220 | const SCEV *visitUnknown(const SCEVUnknown *Expr) { |
| 11221 | return SE.getUnknown(Expr->getValue()); |
| 11222 | } |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11223 | |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11224 | const SCEV *visitCouldNotCompute(const SCEVCouldNotCompute *Expr) { |
| 11225 | return SE.getCouldNotCompute(); |
| 11226 | } |
Sanjoy Das | 148e49f | 2017-04-23 23:04:45 +0000 | [diff] [blame] | 11227 | }; |
| 11228 | |
| 11229 | SCEVMapper SCM(SE2); |
| 11230 | |
| 11231 | while (!LoopStack.empty()) { |
| 11232 | auto *L = LoopStack.pop_back_val(); |
| 11233 | LoopStack.insert(LoopStack.end(), L->begin(), L->end()); |
| 11234 | |
| 11235 | auto *CurBECount = SCM.visit( |
| 11236 | const_cast<ScalarEvolution *>(this)->getBackedgeTakenCount(L)); |
| 11237 | auto *NewBECount = SE2.getBackedgeTakenCount(L); |
| 11238 | |
| 11239 | if (CurBECount == SE2.getCouldNotCompute() || |
| 11240 | NewBECount == SE2.getCouldNotCompute()) { |
| 11241 | // NB! This situation is legal, but is very suspicious -- whatever pass |
| 11242 |       // changed the loop to make a trip count go from "could not compute" to
| 11243 | // computable or vice-versa *should have* invalidated SCEV. However, we |
| 11244 | // choose not to assert here (for now) since we don't want false |
| 11245 | // positives. |
| 11246 | continue; |
| 11247 | } |
| 11248 | |
| 11249 | if (containsUndefs(CurBECount) || containsUndefs(NewBECount)) { |
| 11250 | // SCEV treats "undef" as an unknown but consistent value (i.e. it does |
| 11251 | // not propagate undef aggressively). This means we can (and do) fail |
| 11252 | // verification in cases where a transform makes the trip count of a loop |
| 11253 | // go from "undef" to "undef+1" (say). The transform is fine, since in |
| 11254 |       // both cases the loop iterates "undef" times, but SCEV incorrectly thinks
| 11255 |       // we increased the trip count of the loop by 1.
| 11256 | continue; |
| 11257 | } |
| 11258 | |
| 11259 | if (SE.getTypeSizeInBits(CurBECount->getType()) > |
| 11260 | SE.getTypeSizeInBits(NewBECount->getType())) |
| 11261 | NewBECount = SE2.getZeroExtendExpr(NewBECount, CurBECount->getType()); |
| 11262 | else if (SE.getTypeSizeInBits(CurBECount->getType()) < |
| 11263 | SE.getTypeSizeInBits(NewBECount->getType())) |
| 11264 | CurBECount = SE2.getZeroExtendExpr(CurBECount, NewBECount->getType()); |
| 11265 | |
| 11266 | auto *ConstantDelta = |
| 11267 | dyn_cast<SCEVConstant>(SE2.getMinusSCEV(CurBECount, NewBECount)); |
| 11268 | |
| 11269 | if (ConstantDelta && ConstantDelta->getAPInt() != 0) { |
| 11270 | dbgs() << "Trip Count Changed!\n"; |
| 11271 | dbgs() << "Old: " << *CurBECount << "\n"; |
| 11272 | dbgs() << "New: " << *NewBECount << "\n"; |
| 11273 | dbgs() << "Delta: " << *ConstantDelta << "\n"; |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11274 | std::abort(); |
| 11275 | } |
| 11276 | } |
Benjamin Kramer | 214935e | 2012-10-26 17:31:32 +0000 | [diff] [blame] | 11277 | } |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11278 | |
Chandler Carruth | 082c183 | 2017-01-09 07:44:34 +0000 | [diff] [blame] | 11279 | bool ScalarEvolution::invalidate( |
| 11280 | Function &F, const PreservedAnalyses &PA, |
| 11281 | FunctionAnalysisManager::Invalidator &Inv) { |
| 11282 | // Invalidate the ScalarEvolution object whenever it isn't preserved or one |
| 11283 | // of its dependencies is invalidated. |
| 11284 | auto PAC = PA.getChecker<ScalarEvolutionAnalysis>(); |
| 11285 | return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>()) || |
| 11286 | Inv.invalidate<AssumptionAnalysis>(F, PA) || |
| 11287 | Inv.invalidate<DominatorTreeAnalysis>(F, PA) || |
| 11288 | Inv.invalidate<LoopAnalysis>(F, PA); |
| 11289 | } |
| 11290 | |
Chandler Carruth | dab4eae | 2016-11-23 17:53:26 +0000 | [diff] [blame] | 11291 | AnalysisKey ScalarEvolutionAnalysis::Key; |
NAKAMURA Takumi | df0cd72 | 2016-02-28 17:17:00 +0000 | [diff] [blame] | 11292 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11293 | ScalarEvolution ScalarEvolutionAnalysis::run(Function &F, |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 11294 | FunctionAnalysisManager &AM) { |
Chandler Carruth | b47f801 | 2016-03-11 11:05:24 +0000 | [diff] [blame] | 11295 | return ScalarEvolution(F, AM.getResult<TargetLibraryAnalysis>(F), |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 11296 | AM.getResult<AssumptionAnalysis>(F), |
Chandler Carruth | b47f801 | 2016-03-11 11:05:24 +0000 | [diff] [blame] | 11297 | AM.getResult<DominatorTreeAnalysis>(F), |
| 11298 | AM.getResult<LoopAnalysis>(F)); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11299 | } |
| 11300 | |
| 11301 | PreservedAnalyses |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 11302 | ScalarEvolutionPrinterPass::run(Function &F, FunctionAnalysisManager &AM) { |
Chandler Carruth | b47f801 | 2016-03-11 11:05:24 +0000 | [diff] [blame] | 11303 | AM.getResult<ScalarEvolutionAnalysis>(F).print(OS); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11304 | return PreservedAnalyses::all(); |
| 11305 | } |
| 11306 | |
| 11307 | INITIALIZE_PASS_BEGIN(ScalarEvolutionWrapperPass, "scalar-evolution", |
| 11308 | "Scalar Evolution Analysis", false, true) |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 11309 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11310 | INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) |
| 11311 | INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass) |
| 11312 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) |
| 11313 | INITIALIZE_PASS_END(ScalarEvolutionWrapperPass, "scalar-evolution", |
| 11314 | "Scalar Evolution Analysis", false, true) |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 11315 | |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11316 | char ScalarEvolutionWrapperPass::ID = 0; |
| 11317 | |
| 11318 | ScalarEvolutionWrapperPass::ScalarEvolutionWrapperPass() : FunctionPass(ID) { |
| 11319 | initializeScalarEvolutionWrapperPassPass(*PassRegistry::getPassRegistry()); |
| 11320 | } |
| 11321 | |
| 11322 | bool ScalarEvolutionWrapperPass::runOnFunction(Function &F) { |
| 11323 | SE.reset(new ScalarEvolution( |
| 11324 | F, getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(), |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 11325 | getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F), |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11326 | getAnalysis<DominatorTreeWrapperPass>().getDomTree(), |
| 11327 | getAnalysis<LoopInfoWrapperPass>().getLoopInfo())); |
| 11328 | return false; |
| 11329 | } |
| 11330 | |
| 11331 | void ScalarEvolutionWrapperPass::releaseMemory() { SE.reset(); } |
| 11332 | |
| 11333 | void ScalarEvolutionWrapperPass::print(raw_ostream &OS, const Module *) const { |
| 11334 | SE->print(OS); |
| 11335 | } |
| 11336 | |
| 11337 | void ScalarEvolutionWrapperPass::verifyAnalysis() const { |
| 11338 | if (!VerifySCEV) |
| 11339 | return; |
| 11340 | |
| 11341 | SE->verify(); |
| 11342 | } |
| 11343 | |
| 11344 | void ScalarEvolutionWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const { |
| 11345 | AU.setPreservesAll(); |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 11346 | AU.addRequiredTransitive<AssumptionCacheTracker>(); |
Chandler Carruth | 2f1fd16 | 2015-08-17 02:08:17 +0000 | [diff] [blame] | 11347 | AU.addRequiredTransitive<LoopInfoWrapperPass>(); |
| 11348 | AU.addRequiredTransitive<DominatorTreeWrapperPass>(); |
| 11349 | AU.addRequiredTransitive<TargetLibraryInfoWrapperPass>(); |
| 11350 | } |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11351 | |
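// Return the unique'd SCEVEqualPredicate stating that LHS == RHS, creating
// it if it does not already exist.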
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11352 | const SCEVPredicate *ScalarEvolution::getEqualPredicate(const SCEV *LHS, |
| 11353 | const SCEV *RHS) { |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11354 | FoldingSetNodeID ID; |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11355 | assert(LHS->getType() == RHS->getType() && |
| 11356 | "Type mismatch between LHS and RHS"); |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11357 | // Unique this node based on the arguments |
| 11358 | ID.AddInteger(SCEVPredicate::P_Equal); |
| 11359 | ID.AddPointer(LHS); |
| 11360 | ID.AddPointer(RHS); |
| 11361 | void *IP = nullptr; |
| 11362 | if (const auto *S = UniquePreds.FindNodeOrInsertPos(ID, IP)) |
| 11363 | return S; |
| 11364 | SCEVEqualPredicate *Eq = new (SCEVAllocator) |
| 11365 | SCEVEqualPredicate(ID.Intern(SCEVAllocator), LHS, RHS); |
| 11366 | UniquePreds.InsertNode(Eq, IP); |
| 11367 | return Eq; |
| 11368 | } |
| 11369 | |
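// Return the unique'd SCEVWrapPredicate asserting that AR does not wrap in
// the ways described by AddedFlags, creating it if it does not already exist.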
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11370 | const SCEVPredicate *ScalarEvolution::getWrapPredicate( |
| 11371 | const SCEVAddRecExpr *AR, |
| 11372 | SCEVWrapPredicate::IncrementWrapFlags AddedFlags) { |
| 11373 | FoldingSetNodeID ID; |
| 11374 | // Unique this node based on the arguments |
| 11375 | ID.AddInteger(SCEVPredicate::P_Wrap); |
| 11376 | ID.AddPointer(AR); |
| 11377 | ID.AddInteger(AddedFlags); |
| 11378 | void *IP = nullptr; |
| 11379 | if (const auto *S = UniquePreds.FindNodeOrInsertPos(ID, IP)) |
| 11380 | return S; |
| 11381 | auto *OF = new (SCEVAllocator) |
| 11382 | SCEVWrapPredicate(ID.Intern(SCEVAllocator), AR, AddedFlags); |
| 11383 | UniquePreds.InsertNode(OF, IP); |
| 11384 | return OF; |
| 11385 | } |
| 11386 | |
Benjamin Kramer | 83709b1 | 2015-11-16 09:01:28 +0000 | [diff] [blame] | 11387 | namespace { |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11388 | |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11389 | class SCEVPredicateRewriter : public SCEVRewriteVisitor<SCEVPredicateRewriter> { |
| 11390 | public: |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 11391 | |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11392 | /// Rewrites \p S in the context of a loop L and the SCEV predication |
| 11393 | /// infrastructure. |
| 11394 | /// |
| 11395 | /// If \p Pred is non-null, the SCEV expression is rewritten to respect the |
| 11396 | /// equivalences present in \p Pred. |
| 11397 | /// |
| 11398 |   /// If \p NewPreds is non-null, the rewrite may add further predicates to
| 11399 |   /// \p NewPreds so that the result becomes an AddRecExpr.
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 11400 | static const SCEV *rewrite(const SCEV *S, const Loop *L, ScalarEvolution &SE, |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11401 | SmallPtrSetImpl<const SCEVPredicate *> *NewPreds, |
| 11402 | SCEVUnionPredicate *Pred) { |
| 11403 | SCEVPredicateRewriter Rewriter(L, SE, NewPreds, Pred); |
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 11404 | return Rewriter.visit(S); |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11405 | } |
| 11406 | |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11407 | const SCEV *visitUnknown(const SCEVUnknown *Expr) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11408 | if (Pred) { |
| 11409 | auto ExprPreds = Pred->getPredicatesForExpr(Expr); |
| 11410 | for (auto *Pred : ExprPreds) |
| 11411 | if (const auto *IPred = dyn_cast<SCEVEqualPredicate>(Pred)) |
| 11412 | if (IPred->getLHS() == Expr) |
| 11413 | return IPred->getRHS(); |
| 11414 | } |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11415 | return convertToAddRecWithPreds(Expr); |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11416 | } |
| 11417 | |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11418 | const SCEV *visitZeroExtendExpr(const SCEVZeroExtendExpr *Expr) { |
| 11419 | const SCEV *Operand = visit(Expr->getOperand()); |
Sanjoy Das | b277a42 | 2016-06-15 06:53:55 +0000 | [diff] [blame] | 11420 | const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Operand); |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11421 | if (AR && AR->getLoop() == L && AR->isAffine()) { |
| 11422 | // This couldn't be folded because the operand didn't have the nuw |
| 11423 | // flag. Add the nusw flag as an assumption that we could make. |
| 11424 | const SCEV *Step = AR->getStepRecurrence(SE); |
| 11425 | Type *Ty = Expr->getType(); |
| 11426 | if (addOverflowAssumption(AR, SCEVWrapPredicate::IncrementNUSW)) |
| 11427 | return SE.getAddRecExpr(SE.getZeroExtendExpr(AR->getStart(), Ty), |
| 11428 | SE.getSignExtendExpr(Step, Ty), L, |
| 11429 | AR->getNoWrapFlags()); |
| 11430 | } |
| 11431 | return SE.getZeroExtendExpr(Operand, Expr->getType()); |
| 11432 | } |
| 11433 | |
| 11434 | const SCEV *visitSignExtendExpr(const SCEVSignExtendExpr *Expr) { |
| 11435 | const SCEV *Operand = visit(Expr->getOperand()); |
Sanjoy Das | b277a42 | 2016-06-15 06:53:55 +0000 | [diff] [blame] | 11436 | const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Operand); |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11437 | if (AR && AR->getLoop() == L && AR->isAffine()) { |
| 11438 | // This couldn't be folded because the operand didn't have the nsw |
| 11439 | // flag. Add the nssw flag as an assumption that we could make. |
| 11440 | const SCEV *Step = AR->getStepRecurrence(SE); |
| 11441 | Type *Ty = Expr->getType(); |
| 11442 | if (addOverflowAssumption(AR, SCEVWrapPredicate::IncrementNSSW)) |
| 11443 | return SE.getAddRecExpr(SE.getSignExtendExpr(AR->getStart(), Ty), |
| 11444 | SE.getSignExtendExpr(Step, Ty), L, |
| 11445 | AR->getNoWrapFlags()); |
| 11446 | } |
| 11447 | return SE.getSignExtendExpr(Operand, Expr->getType()); |
| 11448 | } |
| 11449 | |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11450 | private: |
Jatin Bhateja | 7410eea | 2017-11-26 15:08:41 +0000 | [diff] [blame] | 11451 | explicit SCEVPredicateRewriter(const Loop *L, ScalarEvolution &SE, |
| 11452 | SmallPtrSetImpl<const SCEVPredicate *> *NewPreds, |
| 11453 | SCEVUnionPredicate *Pred) |
| 11454 | : SCEVRewriteVisitor(SE), NewPreds(NewPreds), Pred(Pred), L(L) {} |
| 11455 | |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11456 | bool addOverflowAssumption(const SCEVPredicate *P) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11457 | if (!NewPreds) { |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11458 | // Check if we've already made this assumption. |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11459 | return Pred && Pred->implies(P); |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11460 | } |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11461 | NewPreds->insert(P); |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11462 | return true; |
| 11463 | } |
| 11464 | |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11465 | bool addOverflowAssumption(const SCEVAddRecExpr *AR, |
| 11466 | SCEVWrapPredicate::IncrementWrapFlags AddedFlags) { |
| 11467 | auto *A = SE.getWrapPredicate(AR, AddedFlags); |
| 11468 | return addOverflowAssumption(A); |
| 11469 | } |
| 11470 | |
| 11471 | // If \p Expr represents a PHINode, we try to see if it can be represented |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 11472 | // as an AddRec, possibly under a predicate (PHISCEVPred). If it is possible |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11473 | // to add this predicate as a runtime overflow check, we return the AddRec. |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 11474 | // If \p Expr does not meet these conditions (is not a PHI node, or we |
| 11475 | // couldn't create an AddRec for it, or couldn't add the predicate), we just |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11476 | // return \p Expr. |
| 11477 | const SCEV *convertToAddRecWithPreds(const SCEVUnknown *Expr) { |
| 11478 | if (!isa<PHINode>(Expr->getValue())) |
| 11479 | return Expr; |
| 11480 | Optional<std::pair<const SCEV *, SmallVector<const SCEVPredicate *, 3>>> |
| 11481 | PredicatedRewrite = SE.createAddRecFromPHIWithCasts(Expr); |
| 11482 | if (!PredicatedRewrite) |
| 11483 | return Expr; |
| 11484 | for (auto *P : PredicatedRewrite->second){ |
| 11485 | if (!addOverflowAssumption(P)) |
| 11486 | return Expr; |
| 11487 | } |
| 11488 | return PredicatedRewrite->first; |
| 11489 | } |
Michael Liao | b30286d | 2017-09-25 16:21:21 +0000 | [diff] [blame] | 11490 | |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11491 | SmallPtrSetImpl<const SCEVPredicate *> *NewPreds; |
| 11492 | SCEVUnionPredicate *Pred; |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11493 | const Loop *L; |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11494 | }; |
Eugene Zelenko | be709f2 | 2017-08-18 23:51:26 +0000 | [diff] [blame] | 11495 | |
Benjamin Kramer | 83709b1 | 2015-11-16 09:01:28 +0000 | [diff] [blame] | 11496 | } // end anonymous namespace |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11497 | |
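// Rewrite S in the context of loop L using only the predicates already
// present in Preds; no new predicates are generated here.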
Sanjoy Das | 807d33d | 2016-02-20 01:44:10 +0000 | [diff] [blame] | 11498 | const SCEV *ScalarEvolution::rewriteUsingPredicate(const SCEV *S, const Loop *L, |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11499 | SCEVUnionPredicate &Preds) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11500 | return SCEVPredicateRewriter::rewrite(S, L, *this, nullptr, &Preds); |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11501 | } |
| 11502 | |
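// Try to rewrite S into an add-recurrence on L by generating new predicates.
// On success the required predicates are added to Preds and the AddRec is
// returned; otherwise this returns nullptr and leaves Preds untouched.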
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11503 | const SCEVAddRecExpr *ScalarEvolution::convertSCEVToAddRecWithPredicates( |
| 11504 | const SCEV *S, const Loop *L, |
| 11505 | SmallPtrSetImpl<const SCEVPredicate *> &Preds) { |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11506 | SmallPtrSet<const SCEVPredicate *, 4> TransformPreds; |
| 11507 | S = SCEVPredicateRewriter::rewrite(S, L, *this, &TransformPreds, nullptr); |
Silviu Baranga | d68ed85 | 2016-03-23 15:29:30 +0000 | [diff] [blame] | 11508 | auto *AddRec = dyn_cast<SCEVAddRecExpr>(S); |
| 11509 | |
| 11510 | if (!AddRec) |
| 11511 | return nullptr; |
| 11512 | |
| 11513 | // Since the transformation was successful, we can now transfer the SCEV |
| 11514 | // predicates. |
Sanjoy Das | f002212 | 2016-09-28 17:14:58 +0000 | [diff] [blame] | 11515 | for (auto *P : TransformPreds) |
| 11516 | Preds.insert(P); |
| 11517 | |
Silviu Baranga | d68ed85 | 2016-03-23 15:29:30 +0000 | [diff] [blame] | 11518 | return AddRec; |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11519 | } |
| 11520 | |
| 11521 | /// SCEV predicates |
| 11522 | SCEVPredicate::SCEVPredicate(const FoldingSetNodeIDRef ID, |
| 11523 | SCEVPredicateKind Kind) |
| 11524 | : FastID(ID), Kind(Kind) {} |
| 11525 | |
| 11526 | SCEVEqualPredicate::SCEVEqualPredicate(const FoldingSetNodeIDRef ID, |
Dorit Nuzman | ca4fd18 | 2017-07-18 11:57:08 +0000 | [diff] [blame] | 11527 | const SCEV *LHS, const SCEV *RHS) |
| 11528 | : SCEVPredicate(ID, P_Equal), LHS(LHS), RHS(RHS) { |
| 11529 | assert(LHS->getType() == RHS->getType() && "LHS and RHS types don't match"); |
| 11530 | assert(LHS != RHS && "LHS and RHS are the same SCEV"); |
| 11531 | } |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11532 | |
| 11533 | bool SCEVEqualPredicate::implies(const SCEVPredicate *N) const { |
Sanjoy Das | b277a42 | 2016-06-15 06:53:55 +0000 | [diff] [blame] | 11534 | const auto *Op = dyn_cast<SCEVEqualPredicate>(N); |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11535 | |
| 11536 | if (!Op) |
| 11537 | return false; |
| 11538 | |
| 11539 | return Op->LHS == LHS && Op->RHS == RHS; |
| 11540 | } |
| 11541 | |
| 11542 | bool SCEVEqualPredicate::isAlwaysTrue() const { return false; } |
| 11543 | |
| 11544 | const SCEV *SCEVEqualPredicate::getExpr() const { return LHS; } |
| 11545 | |
| 11546 | void SCEVEqualPredicate::print(raw_ostream &OS, unsigned Depth) const { |
| 11547 | OS.indent(Depth) << "Equal predicate: " << *LHS << " == " << *RHS << "\n"; |
| 11548 | } |
| 11549 | |
Silviu Baranga | ea63a7f | 2016-02-08 17:02:45 +0000 | [diff] [blame] | 11550 | SCEVWrapPredicate::SCEVWrapPredicate(const FoldingSetNodeIDRef ID, |
| 11551 | const SCEVAddRecExpr *AR, |
| 11552 | IncrementWrapFlags Flags) |
| 11553 | : SCEVPredicate(ID, P_Wrap), AR(AR), Flags(Flags) {} |
| 11554 | |
| 11555 | const SCEV *SCEVWrapPredicate::getExpr() const { return AR; } |
| 11556 | |
| 11557 | bool SCEVWrapPredicate::implies(const SCEVPredicate *N) const { |
| 11558 | const auto *Op = dyn_cast<SCEVWrapPredicate>(N); |
| 11559 | |
| 11560 | return Op && Op->AR == AR && setFlags(Flags, Op->Flags) == Flags; |
| 11561 | } |
| 11562 | |
| 11563 | bool SCEVWrapPredicate::isAlwaysTrue() const { |
| 11564 | SCEV::NoWrapFlags ScevFlags = AR->getNoWrapFlags(); |
| 11565 | IncrementWrapFlags IFlags = Flags; |
| 11566 | |
| 11567 | if (ScalarEvolution::setFlags(ScevFlags, SCEV::FlagNSW) == ScevFlags) |
| 11568 | IFlags = clearFlags(IFlags, IncrementNSSW); |
| 11569 | |
| 11570 | return IFlags == IncrementAnyWrap; |
| 11571 | } |
| 11572 | |
| 11573 | void SCEVWrapPredicate::print(raw_ostream &OS, unsigned Depth) const { |
| 11574 | OS.indent(Depth) << *getExpr() << " Added Flags: "; |
| 11575 | if (SCEVWrapPredicate::IncrementNUSW & getFlags()) |
| 11576 | OS << "<nusw>"; |
| 11577 | if (SCEVWrapPredicate::IncrementNSSW & getFlags()) |
| 11578 | OS << "<nssw>"; |
| 11579 | OS << "\n"; |
| 11580 | } |
| 11581 | |
| 11582 | SCEVWrapPredicate::IncrementWrapFlags |
| 11583 | SCEVWrapPredicate::getImpliedFlags(const SCEVAddRecExpr *AR, |
| 11584 | ScalarEvolution &SE) { |
| 11585 | IncrementWrapFlags ImpliedFlags = IncrementAnyWrap; |
| 11586 | SCEV::NoWrapFlags StaticFlags = AR->getNoWrapFlags(); |
| 11587 | |
| 11588 | // We can safely transfer the NSW flag as NSSW. |
| 11589 | if (ScalarEvolution::setFlags(StaticFlags, SCEV::FlagNSW) == StaticFlags) |
| 11590 | ImpliedFlags = IncrementNSSW; |
| 11591 | |
| 11592 | if (ScalarEvolution::setFlags(StaticFlags, SCEV::FlagNUW) == StaticFlags) { |
| 11593 | // If the increment is positive, the SCEV NUW flag will also imply the |
| 11594 | // WrapPredicate NUSW flag. |
| 11595 | if (const auto *Step = dyn_cast<SCEVConstant>(AR->getStepRecurrence(SE))) |
| 11596 | if (Step->getValue()->getValue().isNonNegative()) |
| 11597 | ImpliedFlags = setFlags(ImpliedFlags, IncrementNUSW); |
| 11598 | } |
| 11599 | |
| 11600 | return ImpliedFlags; |
| 11601 | } |
| 11602 | |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11603 | /// Union predicates don't get cached, so create a dummy set ID for them.
| 11604 | SCEVUnionPredicate::SCEVUnionPredicate() |
| 11605 | : SCEVPredicate(FoldingSetNodeIDRef(nullptr, 0), P_Union) {} |
| 11606 | |
| 11607 | bool SCEVUnionPredicate::isAlwaysTrue() const { |
Sanjoy Das | 3b827c7 | 2015-11-29 23:40:53 +0000 | [diff] [blame] | 11608 | return all_of(Preds, |
| 11609 | [](const SCEVPredicate *I) { return I->isAlwaysTrue(); }); |
Silviu Baranga | e3c0534 | 2015-11-02 14:41:02 +0000 | [diff] [blame] | 11610 | } |
| 11611 | |
| 11612 | ArrayRef<const SCEVPredicate *> |
| 11613 | SCEVUnionPredicate::getPredicatesForExpr(const SCEV *Expr) { |
| 11614 | auto I = SCEVToPreds.find(Expr); |
| 11615 | if (I == SCEVToPreds.end()) |
| 11616 | return ArrayRef<const SCEVPredicate *>(); |
| 11617 | return I->second; |
| 11618 | } |
| 11619 | |
bool SCEVUnionPredicate::implies(const SCEVPredicate *N) const {
  if (const auto *Set = dyn_cast<SCEVUnionPredicate>(N))
    return all_of(Set->Preds,
                  [this](const SCEVPredicate *I) { return this->implies(I); });

  auto ScevPredsIt = SCEVToPreds.find(N->getExpr());
  if (ScevPredsIt == SCEVToPreds.end())
    return false;
  auto &SCEVPreds = ScevPredsIt->second;

  return any_of(SCEVPreds,
                [N](const SCEVPredicate *I) { return I->implies(N); });
}

const SCEV *SCEVUnionPredicate::getExpr() const { return nullptr; }

void SCEVUnionPredicate::print(raw_ostream &OS, unsigned Depth) const {
  for (auto Pred : Preds)
    Pred->print(OS, Depth);
}

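// Add N to the union: nested unions are flattened member by member, and
// predicates already implied by the current set are dropped.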
void SCEVUnionPredicate::add(const SCEVPredicate *N) {
  if (const auto *Set = dyn_cast<SCEVUnionPredicate>(N)) {
    for (auto Pred : Set->Preds)
      add(Pred);
    return;
  }

  if (implies(N))
    return;

  const SCEV *Key = N->getExpr();
  assert(Key && "Only SCEVUnionPredicate doesn't have an "
                "associated expression!");

  SCEVToPreds[Key].push_back(N);
  Preds.push_back(N);
}

PredicatedScalarEvolution::PredicatedScalarEvolution(ScalarEvolution &SE,
                                                     Loop &L)
    : SE(SE), L(L) {}

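// Return the SCEV for V rewritten under the predicates collected so far.
// Rewrites are cached per expression together with the generation in which
// they were computed, so a cached result is reused only while no new
// predicates have been added.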
const SCEV *PredicatedScalarEvolution::getSCEV(Value *V) {
  const SCEV *Expr = SE.getSCEV(V);
  RewriteEntry &Entry = RewriteMap[Expr];

  // If we already have an entry and the version matches, return it.
  if (Entry.second && Generation == Entry.first)
    return Entry.second;

  // The entry is either missing or stale. If it is stale, rewrite it starting
  // from the previously rewritten expression; otherwise start from the
  // original SCEV.
  if (Entry.second)
    Expr = Entry.second;

  const SCEV *NewSCEV = SE.rewriteUsingPredicate(Expr, &L, Preds);
  Entry = {Generation, NewSCEV};

  return NewSCEV;
}

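// Lazily compute the predicated backedge-taken count for the loop and fold
// the predicates it required into the current predicate set.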
const SCEV *PredicatedScalarEvolution::getBackedgeTakenCount() {
  if (!BackedgeCount) {
    SCEVUnionPredicate BackedgePred;
    BackedgeCount = SE.getPredicatedBackedgeTakenCount(&L, BackedgePred);
    addPredicate(BackedgePred);
  }
  return BackedgeCount;
}

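// Record a new predicate unless it is already implied; adding one bumps the
// generation so cached rewrites are refreshed on the next query.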
void PredicatedScalarEvolution::addPredicate(const SCEVPredicate &Pred) {
  if (Preds.implies(&Pred))
    return;
  Preds.add(&Pred);
  updateGeneration();
}

const SCEVUnionPredicate &PredicatedScalarEvolution::getUnionPredicate() const {
  return Preds;
}

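// Invalidate cached rewrites by advancing the generation counter. Cached
// entries are only re-rewritten eagerly in the unlikely case that the counter
// wraps back to zero.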
void PredicatedScalarEvolution::updateGeneration() {
  // If the generation number wrapped, recompute everything.
  if (++Generation == 0) {
    for (auto &II : RewriteMap) {
      const SCEV *Rewritten = II.second.second;
      II.second = {Generation, SE.rewriteUsingPredicate(Rewritten, &L, Preds)};
    }
  }
}

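// Assume the given no-overflow behaviour for V's add recurrence: flags that
// are already statically implied are cleared, a wrap predicate covering the
// rest is added, and the assumed flags are remembered in FlagsMap so that
// hasNoOverflow() can answer later queries.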
void PredicatedScalarEvolution::setNoOverflow(
    Value *V, SCEVWrapPredicate::IncrementWrapFlags Flags) {
  const SCEV *Expr = getSCEV(V);
  const auto *AR = cast<SCEVAddRecExpr>(Expr);

  auto ImpliedFlags = SCEVWrapPredicate::getImpliedFlags(AR, SE);

  // Clear the statically implied flags.
  Flags = SCEVWrapPredicate::clearFlags(Flags, ImpliedFlags);
  addPredicate(*SE.getWrapPredicate(AR, Flags));

  auto II = FlagsMap.insert({V, Flags});
  if (!II.second)
    II.first->second = SCEVWrapPredicate::setFlags(Flags, II.first->second);
}

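// Return true if the requested flags are known to hold for V's add
// recurrence, either statically or because a matching wrap predicate was
// previously added through setNoOverflow().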
bool PredicatedScalarEvolution::hasNoOverflow(
    Value *V, SCEVWrapPredicate::IncrementWrapFlags Flags) {
  const SCEV *Expr = getSCEV(V);
  const auto *AR = cast<SCEVAddRecExpr>(Expr);

  Flags = SCEVWrapPredicate::clearFlags(
      Flags, SCEVWrapPredicate::getImpliedFlags(AR, SE));

  auto II = FlagsMap.find(V);

  if (II != FlagsMap.end())
    Flags = SCEVWrapPredicate::clearFlags(Flags, II->second);

  return Flags == SCEVWrapPredicate::IncrementAnyWrap;
}

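// Try to prove that V evolves as an add recurrence in the loop, adding any
// predicates this requires to the current set; returns null if no such
// rewrite is possible. A typical use from client code might look like this
// (sketch; PSE names a PredicatedScalarEvolution instance):
//
//   if (const SCEVAddRecExpr *AR = PSE.getAsAddRec(Ptr)) {
//     // AR is an add recurrence under the assumptions collected in
//     // PSE.getUnionPredicate().
//   }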
const SCEVAddRecExpr *PredicatedScalarEvolution::getAsAddRec(Value *V) {
  const SCEV *Expr = this->getSCEV(V);
  SmallPtrSet<const SCEVPredicate *, 4> NewPreds;
  auto *New = SE.convertSCEVToAddRecWithPredicates(Expr, &L, NewPreds);

  if (!New)
    return nullptr;

  for (auto *P : NewPreds)
    Preds.add(P);

  updateGeneration();
  RewriteMap[SE.getSCEV(V)] = {Generation, New};
  return New;
}

PredicatedScalarEvolution::PredicatedScalarEvolution(
    const PredicatedScalarEvolution &Init)
    : RewriteMap(Init.RewriteMap), SE(Init.SE), L(Init.L), Preds(Init.Preds),
      Generation(Init.Generation), BackedgeCount(Init.BackedgeCount) {
  for (const auto &I : Init.FlagsMap)
    FlagsMap.insert(I);
}

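// Debug output: list every SCEVable instruction in the loop whose expression
// was rewritten under the current predicates, together with its original and
// rewritten form.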
void PredicatedScalarEvolution::print(raw_ostream &OS, unsigned Depth) const {
  // Walk every instruction in the loop.
  for (auto *BB : L.getBlocks())
    for (auto &I : *BB) {
      if (!SE.isSCEVable(I.getType()))
        continue;

      auto *Expr = SE.getSCEV(&I);
      auto II = RewriteMap.find(Expr);

      if (II == RewriteMap.end())
        continue;

      // Only print expressions that were actually rewritten.
      if (II->second.second == Expr)
        continue;

      OS.indent(Depth) << "[PSE]" << I << ":\n";
      OS.indent(Depth + 2) << *Expr << "\n";
      OS.indent(Depth + 2) << "--> " << *II->second.second << "\n";
    }
}