//===- LoopAccessAnalysis.cpp - Loop Access Analysis Implementation ------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The implementation of the loop memory dependence analysis that was
// originally developed for the loop vectorizer.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/LoopAccessAnalysis.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/EquivalenceClasses.h"
#include "llvm/ADT/PointerIntPair.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AliasSetTracker.h"
#include "llvm/Analysis/LoopAnalysisManager.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Analysis/VectorUtils.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <iterator>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "loop-accesses"

static cl::opt<unsigned, true>
    VectorizationFactor("force-vector-width", cl::Hidden,
                        cl::desc("Sets the SIMD width. Zero is autoselect."),
                        cl::location(VectorizerParams::VectorizationFactor));
unsigned VectorizerParams::VectorizationFactor;

static cl::opt<unsigned, true>
    VectorizationInterleave("force-vector-interleave", cl::Hidden,
                            cl::desc("Sets the vectorization interleave count. "
                                     "Zero is autoselect."),
                            cl::location(
                                VectorizerParams::VectorizationInterleave));
unsigned VectorizerParams::VectorizationInterleave;

static cl::opt<unsigned, true> RuntimeMemoryCheckThreshold(
    "runtime-memory-check-threshold", cl::Hidden,
    cl::desc("When performing memory disambiguation checks at runtime do not "
             "generate more than this number of comparisons (default = 8)."),
    cl::location(VectorizerParams::RuntimeMemoryCheckThreshold), cl::init(8));
unsigned VectorizerParams::RuntimeMemoryCheckThreshold;

/// The maximum iterations used to merge memory checks
static cl::opt<unsigned> MemoryCheckMergeThreshold(
    "memory-check-merge-threshold", cl::Hidden,
    cl::desc("Maximum number of comparisons done when trying to merge "
             "runtime memory checks. (default = 100)"),
    cl::init(100));

/// Maximum SIMD width.
const unsigned VectorizerParams::MaxVectorWidth = 64;

/// We collect dependences up to this threshold.
static cl::opt<unsigned>
    MaxDependences("max-dependences", cl::Hidden,
                   cl::desc("Maximum number of dependences collected by "
                            "loop-access analysis (default = 100)"),
                   cl::init(100));

/// This enables versioning on the strides of symbolically striding memory
/// accesses in code like the following.
///   for (i = 0; i < N; ++i)
///     A[i * Stride1] += B[i * Stride2] ...
///
/// Will be roughly translated to
///   if (Stride1 == 1 && Stride2 == 1) {
///     for (i = 0; i < N; i += 4)
///       A[i:i+3] += ...
///   } else
///     ...
static cl::opt<bool> EnableMemAccessVersioning(
    "enable-mem-access-versioning", cl::init(true), cl::Hidden,
    cl::desc("Enable symbolic stride memory access versioning"));

/// Enable store-to-load forwarding conflict detection. This option can
/// be disabled for correctness testing.
static cl::opt<bool> EnableForwardingConflictDetection(
    "store-to-load-forwarding-conflict-detection", cl::Hidden,
    cl::desc("Enable conflict detection in loop-access analysis"),
    cl::init(true));

bool VectorizerParams::isInterleaveForced() {
  return ::VectorizationInterleave.getNumOccurrences() > 0;
}

Value *llvm::stripIntegerCast(Value *V) {
  if (auto *CI = dyn_cast<CastInst>(V))
    if (CI->getOperand(0)->getType()->isIntegerTy())
      return CI->getOperand(0);
  return V;
}

const SCEV *llvm::replaceSymbolicStrideSCEV(PredicatedScalarEvolution &PSE,
                                            const ValueToValueMap &PtrToStride,
                                            Value *Ptr, Value *OrigPtr) {
  const SCEV *OrigSCEV = PSE.getSCEV(Ptr);

  // If there is an entry in the map, return the SCEV of the pointer with the
  // symbolic stride replaced by one.
  ValueToValueMap::const_iterator SI =
      PtrToStride.find(OrigPtr ? OrigPtr : Ptr);
  if (SI != PtrToStride.end()) {
    Value *StrideVal = SI->second;

    // Strip casts.
    StrideVal = stripIntegerCast(StrideVal);

    ScalarEvolution *SE = PSE.getSE();
    const auto *U = cast<SCEVUnknown>(SE->getSCEV(StrideVal));
    const auto *CT =
        static_cast<const SCEVConstant *>(SE->getOne(StrideVal->getType()));

    PSE.addPredicate(*SE->getEqualPredicate(U, CT));
    auto *Expr = PSE.getSCEV(Ptr);

    LLVM_DEBUG(dbgs() << "LAA: Replacing SCEV: " << *OrigSCEV
                      << " by: " << *Expr << "\n");
    return Expr;
  }

  // Otherwise, just return the SCEV of the original pointer.
  return OrigSCEV;
}
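
// A hedged illustration of the rewrite above (all names and SCEVs are
// invented for exposition, not taken from a real test):
//
//   // for (i = 0; i < n; ++i) A[i * Stride] = ...;
//   // Pointer SCEV before: {%A,+,(4 * %Stride)}<%loop>
//   // Pointer SCEV after:  {%A,+,4}<%loop>
//   // Predicate added:     %Stride == 1
//
// Versioned code then branches on the recorded predicate at run time, and
// only the "Stride == 1" copy of the loop uses the simplified SCEV.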

/// Calculate Start and End points of memory access.
/// Let's assume A is the first access and B is a memory access on the N-th
/// loop iteration. Then B is calculated as:
///   B = A + Step*N .
/// Step value may be positive or negative.
/// N is a calculated back-edge taken count:
///     N = (TripCount > 0) ? RoundDown(TripCount - 1, VF) : 0
/// Start and End points are calculated in the following way:
///     Start = UMIN(A, B); End = UMAX(A, B) + SizeOfElt,
/// where SizeOfElt is the size of a single memory access in bytes.
///
/// There is no conflict when the intervals are disjoint:
///     NoConflict = (P2.Start >= P1.End) || (P1.Start >= P2.End)
void RuntimePointerChecking::insert(Loop *Lp, Value *Ptr, bool WritePtr,
                                    unsigned DepSetId, unsigned ASId,
                                    const ValueToValueMap &Strides,
                                    PredicatedScalarEvolution &PSE) {
  // Get the stride replaced scev.
  const SCEV *Sc = replaceSymbolicStrideSCEV(PSE, Strides, Ptr);
  ScalarEvolution *SE = PSE.getSE();

  const SCEV *ScStart;
  const SCEV *ScEnd;

  if (SE->isLoopInvariant(Sc, Lp))
    ScStart = ScEnd = Sc;
  else {
    const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Sc);
    assert(AR && "Invalid addrec expression");
    const SCEV *Ex = PSE.getBackedgeTakenCount();

    ScStart = AR->getStart();
    ScEnd = AR->evaluateAtIteration(Ex, *SE);
    const SCEV *Step = AR->getStepRecurrence(*SE);

    // For expressions with negative step, the upper bound is ScStart and the
    // lower bound is ScEnd.
    if (const auto *CStep = dyn_cast<SCEVConstant>(Step)) {
      if (CStep->getValue()->isNegative())
        std::swap(ScStart, ScEnd);
    } else {
      // Fallback case: the step is not constant, but we can still
      // get the upper and lower bounds of the interval by using min/max
      // expressions.
      ScStart = SE->getUMinExpr(ScStart, ScEnd);
      ScEnd = SE->getUMaxExpr(AR->getStart(), ScEnd);
    }
    // Add the size of the pointed element to ScEnd.
    unsigned EltSize =
        Ptr->getType()->getPointerElementType()->getScalarSizeInBits() / 8;
    const SCEV *EltSizeSCEV = SE->getConstant(ScEnd->getType(), EltSize);
    ScEnd = SE->getAddExpr(ScEnd, EltSizeSCEV);
  }

  Pointers.emplace_back(Ptr, ScStart, ScEnd, WritePtr, DepSetId, ASId, Sc);
}
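
// A worked example of the interval computed above, with values chosen purely
// for exposition (a hypothetical loop, not from any particular test):
//
//   // int32_t *A;
//   // for (i = 0; i <= 7; ++i)   -> backedge-taken count Ex = 7
//   //   A[2 * i] = ...;          -> AddRec {%A,+,8} (2 elts * 4 bytes)
//   //
//   // ScStart = %A
//   // ScEnd   = %A + 8*7 + 4 = %A + 60  (last address written + elt size)
//
// With a negative constant step the two bounds are swapped first, so the
// resulting [Start, End) interval still covers every byte the access touches.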

SmallVector<RuntimePointerChecking::PointerCheck, 4>
RuntimePointerChecking::generateChecks() const {
  SmallVector<PointerCheck, 4> Checks;

  for (unsigned I = 0; I < CheckingGroups.size(); ++I) {
    for (unsigned J = I + 1; J < CheckingGroups.size(); ++J) {
      const RuntimePointerChecking::CheckingPtrGroup &CGI = CheckingGroups[I];
      const RuntimePointerChecking::CheckingPtrGroup &CGJ = CheckingGroups[J];

      if (needsChecking(CGI, CGJ))
        Checks.push_back(std::make_pair(&CGI, &CGJ));
    }
  }
  return Checks;
}

void RuntimePointerChecking::generateChecks(
    MemoryDepChecker::DepCandidates &DepCands, bool UseDependencies) {
  assert(Checks.empty() && "Checks is not empty");
  groupChecks(DepCands, UseDependencies);
  Checks = generateChecks();
}

bool RuntimePointerChecking::needsChecking(const CheckingPtrGroup &M,
                                           const CheckingPtrGroup &N) const {
  for (unsigned I = 0, EI = M.Members.size(); EI != I; ++I)
    for (unsigned J = 0, EJ = N.Members.size(); EJ != J; ++J)
      if (needsChecking(M.Members[I], N.Members[J]))
        return true;
  return false;
}

/// Compare \p I and \p J and return the minimum.
/// Return nullptr in case we couldn't find an answer.
static const SCEV *getMinFromExprs(const SCEV *I, const SCEV *J,
                                   ScalarEvolution *SE) {
  const SCEV *Diff = SE->getMinusSCEV(J, I);
  const SCEVConstant *C = dyn_cast<const SCEVConstant>(Diff);

  if (!C)
    return nullptr;
  if (C->getValue()->isNegative())
    return J;
  return I;
}
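
// E.g. (hypothetical SCEVs, for exposition): getMinFromExprs(%A, (%A + 16))
// returns %A because the difference folds to the constant 16; for two
// unrelated bases %A and %B the difference is not a SCEVConstant, so nullptr
// is returned and the caller treats the pair as incomparable.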

bool RuntimePointerChecking::CheckingPtrGroup::addPointer(unsigned Index) {
  const SCEV *Start = RtCheck.Pointers[Index].Start;
  const SCEV *End = RtCheck.Pointers[Index].End;

  // Compare the starts and ends with the known minimum and maximum
  // of this set. We need to know how we compare against the min/max
  // of the set in order to be able to emit memchecks.
  const SCEV *Min0 = getMinFromExprs(Start, Low, RtCheck.SE);
  if (!Min0)
    return false;

  const SCEV *Min1 = getMinFromExprs(End, High, RtCheck.SE);
  if (!Min1)
    return false;

  // Update the low bound expression if we've found a new min value.
  if (Min0 == Start)
    Low = Start;

  // Update the high bound expression if we've found a new max value.
  if (Min1 != End)
    High = End;

  Members.push_back(Index);
  return true;
}
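
// Sketch of how a group grows, with hypothetical bounds: a group spanning
// [Low = %A, High = %A + 64) can absorb a pointer whose interval is
// [%A + 32, %A + 128) because both endpoint comparisons fold to constants;
// the group's bounds then widen to [%A, %A + 128). A pointer based on an
// unrelated base %B is rejected here and will end up in its own group.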

void RuntimePointerChecking::groupChecks(
    MemoryDepChecker::DepCandidates &DepCands, bool UseDependencies) {
  // We build the groups from dependency candidates equivalence classes
  // because:
  //    - We know that pointers in the same equivalence class share
  //      the same underlying object and therefore there is a chance
  //      that we can compare pointers
  //    - We wouldn't be able to merge two pointers for which we need
  //      to emit a memcheck. The classes in DepCands are already
  //      conveniently built such that no two pointers in the same
  //      class need checking against each other.

  // We use the following (greedy) algorithm to construct the groups
  // For every pointer in the equivalence class:
  //   For each existing group:
  //   - if the difference between this pointer and the min/max bounds
  //     of the group is a constant, then make the pointer part of the
  //     group and update the min/max bounds of that group as required.

  CheckingGroups.clear();

  // If we need to check two pointers to the same underlying object
  // with a non-constant difference, we shouldn't perform any pointer
  // grouping with those pointers. This is because we can easily get
  // into cases where the resulting check would return false, even when
  // the accesses are safe.
  //
  // The following example shows this:
  // for (i = 0; i < 1000; ++i)
  //   a[5000 + i * m] = a[i] + a[i + 9000]
  //
  // Here grouping gives a check of (5000, 5000 + 1000 * m) against
  // (0, 10000) which is always false. However, if m is 1, there is no
  // dependence. Not grouping the checks for a[i] and a[i + 9000] allows
  // us to perform an accurate check in this case.
  //
  // The above case requires that we have an UnknownDependence between
  // accesses to the same underlying object. This cannot happen unless
  // FoundNonConstantDistanceDependence is set, and therefore UseDependencies
  // is also false. In this case we will use the fallback path and create
  // separate checking groups for all pointers.

  // If we don't have the dependency partitions, construct a new
  // checking pointer group for each pointer. This is also required
  // for correctness, because in this case we can have checking between
  // pointers to the same underlying object.
  if (!UseDependencies) {
    for (unsigned I = 0; I < Pointers.size(); ++I)
      CheckingGroups.push_back(CheckingPtrGroup(I, *this));
    return;
  }

  unsigned TotalComparisons = 0;

  DenseMap<Value *, unsigned> PositionMap;
  for (unsigned Index = 0; Index < Pointers.size(); ++Index)
    PositionMap[Pointers[Index].PointerValue] = Index;

  // We need to keep track of what pointers we've already seen so we
  // don't process them twice.
  SmallSet<unsigned, 2> Seen;

  // Go through all equivalence classes, get the "pointer check groups"
  // and add them to the overall solution. We use the order in which accesses
  // appear in 'Pointers' to enforce determinism.
  for (unsigned I = 0; I < Pointers.size(); ++I) {
    // We've seen this pointer before, and therefore already processed
    // its equivalence class.
    if (Seen.count(I))
      continue;

    MemoryDepChecker::MemAccessInfo Access(Pointers[I].PointerValue,
                                           Pointers[I].IsWritePtr);

    SmallVector<CheckingPtrGroup, 2> Groups;
    auto LeaderI = DepCands.findValue(DepCands.getLeaderValue(Access));

    // Because DepCands is constructed by visiting accesses in the order in
    // which they appear in alias sets (which is deterministic) and the
    // iteration order within an equivalence class member is only dependent on
    // the order in which unions and insertions are performed on the
    // equivalence class, the iteration order is deterministic.
    for (auto MI = DepCands.member_begin(LeaderI), ME = DepCands.member_end();
         MI != ME; ++MI) {
      unsigned Pointer = PositionMap[MI->getPointer()];
      bool Merged = false;
      // Mark this pointer as seen.
      Seen.insert(Pointer);

      // Go through all the existing sets and see if we can find one
      // which can include this pointer.
      for (CheckingPtrGroup &Group : Groups) {
        // Don't perform more than a certain amount of comparisons.
        // This should limit the cost of grouping the pointers to something
        // reasonable. If we do end up hitting this threshold, the algorithm
        // will create separate groups for all remaining pointers.
        if (TotalComparisons > MemoryCheckMergeThreshold)
          break;

        TotalComparisons++;

        if (Group.addPointer(Pointer)) {
          Merged = true;
          break;
        }
      }

      if (!Merged)
        // We couldn't add this pointer to any existing set or the threshold
        // for the number of comparisons has been reached. Create a new group
        // to hold the current pointer.
        Groups.push_back(CheckingPtrGroup(Pointer, *this));
    }

    // We've computed the grouped checks for this partition.
    // Save the results and continue with the next one.
    llvm::copy(Groups, std::back_inserter(CheckingGroups));
  }
}

bool RuntimePointerChecking::arePointersInSamePartition(
    const SmallVectorImpl<int> &PtrToPartition, unsigned PtrIdx1,
    unsigned PtrIdx2) {
  return (PtrToPartition[PtrIdx1] != -1 &&
          PtrToPartition[PtrIdx1] == PtrToPartition[PtrIdx2]);
}

bool RuntimePointerChecking::needsChecking(unsigned I, unsigned J) const {
  const PointerInfo &PointerI = Pointers[I];
  const PointerInfo &PointerJ = Pointers[J];

  // No need to check if two read-only pointers intersect.
  if (!PointerI.IsWritePtr && !PointerJ.IsWritePtr)
    return false;

  // Only need to check pointers between two different dependency sets.
  if (PointerI.DependencySetId == PointerJ.DependencySetId)
    return false;

  // Only need to check pointers in the same alias set.
  if (PointerI.AliasSetId != PointerJ.AliasSetId)
    return false;

  return true;
}
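
// E.g.: two loads never need a check; a load and a store that landed in the
// same dependence set are handled by the dependence checker instead; only a
// pair from different dependence sets of the same alias set (say, a store
// through %A in set 1 and a load through %B in set 2, names hypothetical)
// needs a runtime check.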

void RuntimePointerChecking::printChecks(
    raw_ostream &OS, const SmallVectorImpl<PointerCheck> &Checks,
    unsigned Depth) const {
  unsigned N = 0;
  for (const auto &Check : Checks) {
    const auto &First = Check.first->Members, &Second = Check.second->Members;

    OS.indent(Depth) << "Check " << N++ << ":\n";

    OS.indent(Depth + 2) << "Comparing group (" << Check.first << "):\n";
    for (unsigned K = 0; K < First.size(); ++K)
      OS.indent(Depth + 2) << *Pointers[First[K]].PointerValue << "\n";

    OS.indent(Depth + 2) << "Against group (" << Check.second << "):\n";
    for (unsigned K = 0; K < Second.size(); ++K)
      OS.indent(Depth + 2) << *Pointers[Second[K]].PointerValue << "\n";
  }
}

void RuntimePointerChecking::print(raw_ostream &OS, unsigned Depth) const {
  OS.indent(Depth) << "Run-time memory checks:\n";
  printChecks(OS, Checks, Depth);

  OS.indent(Depth) << "Grouped accesses:\n";
  for (unsigned I = 0; I < CheckingGroups.size(); ++I) {
    const auto &CG = CheckingGroups[I];

    OS.indent(Depth + 2) << "Group " << &CG << ":\n";
    OS.indent(Depth + 4) << "(Low: " << *CG.Low << " High: " << *CG.High
                         << ")\n";
    for (unsigned J = 0; J < CG.Members.size(); ++J) {
      OS.indent(Depth + 6) << "Member: " << *Pointers[CG.Members[J]].Expr
                           << "\n";
    }
  }
}

namespace {

/// Analyses memory accesses in a loop.
///
/// Checks whether run time pointer checks are needed and builds sets for data
/// dependence checking.
class AccessAnalysis {
public:
  /// Read or write access location.
  typedef PointerIntPair<Value *, 1, bool> MemAccessInfo;
  typedef SmallVector<MemAccessInfo, 8> MemAccessInfoList;

  AccessAnalysis(const DataLayout &Dl, Loop *TheLoop, AliasAnalysis *AA,
                 LoopInfo *LI, MemoryDepChecker::DepCandidates &DA,
                 PredicatedScalarEvolution &PSE)
      : DL(Dl), TheLoop(TheLoop), AST(*AA), LI(LI), DepCands(DA),
        IsRTCheckAnalysisNeeded(false), PSE(PSE) {}

  /// Register a load and whether it is only read from.
  void addLoad(MemoryLocation &Loc, bool IsReadOnly) {
    Value *Ptr = const_cast<Value *>(Loc.Ptr);
    AST.add(Ptr, LocationSize::unknown(), Loc.AATags);
    Accesses.insert(MemAccessInfo(Ptr, false));
    if (IsReadOnly)
      ReadOnlyPtr.insert(Ptr);
  }

  /// Register a store.
  void addStore(MemoryLocation &Loc) {
    Value *Ptr = const_cast<Value *>(Loc.Ptr);
    AST.add(Ptr, LocationSize::unknown(), Loc.AATags);
    Accesses.insert(MemAccessInfo(Ptr, true));
  }

  /// Check if we can emit a run-time no-alias check for \p Access.
  ///
  /// Returns true if we can emit a run-time no alias check for \p Access.
  /// If we can check this access, this also adds it to a dependence set and
  /// adds a run-time check for it to \p RtCheck. If \p Assume is true,
  /// we will attempt to use additional run-time checks in order to get
  /// the bounds of the pointer.
  bool createCheckForAccess(RuntimePointerChecking &RtCheck,
                            MemAccessInfo Access,
                            const ValueToValueMap &Strides,
                            DenseMap<Value *, unsigned> &DepSetId,
                            Loop *TheLoop, unsigned &RunningDepId,
                            unsigned ASId, bool ShouldCheckStride,
                            bool Assume);

  /// Check whether we can check the pointers at runtime for
  /// non-intersection.
  ///
  /// Returns true if we need no check or if we do and we can generate them
  /// (i.e. the pointers have computable bounds).
  bool canCheckPtrAtRT(RuntimePointerChecking &RtCheck, ScalarEvolution *SE,
                       Loop *TheLoop, const ValueToValueMap &Strides,
                       bool ShouldCheckWrap = false);

  /// Goes over all memory accesses, checks whether a RT check is needed
  /// and builds sets of dependent accesses.
  void buildDependenceSets() {
    processMemAccesses();
  }

  /// Initial processing of memory accesses determined that we need to
  /// perform dependency checking.
  ///
  /// Note that this can later be cleared if we retry memcheck analysis without
  /// dependency checking (i.e. FoundNonConstantDistanceDependence).
  bool isDependencyCheckNeeded() { return !CheckDeps.empty(); }

  /// We decided that no dependence analysis would be used. Reset the state.
  void resetDepChecks(MemoryDepChecker &DepChecker) {
    CheckDeps.clear();
    DepChecker.clearDependences();
  }

  MemAccessInfoList &getDependenciesToCheck() { return CheckDeps; }

private:
  typedef SetVector<MemAccessInfo> PtrAccessSet;

  /// Go over all memory accesses and check whether runtime pointer checks
  /// are needed and build sets of dependency check candidates.
  void processMemAccesses();

  /// Set of all accesses.
  PtrAccessSet Accesses;

  const DataLayout &DL;

  /// The loop being checked.
  const Loop *TheLoop;

  /// List of accesses that need a further dependence check.
  MemAccessInfoList CheckDeps;

  /// Set of pointers that are read only.
  SmallPtrSet<Value *, 16> ReadOnlyPtr;

  /// An alias set tracker to partition the access set by underlying object and
  /// intrinsic property (such as TBAA metadata).
  AliasSetTracker AST;

  LoopInfo *LI;

  /// Sets of potentially dependent accesses - members of one set share an
  /// underlying pointer. The set "CheckDeps" identifies which sets really need
  /// a dependence check.
  MemoryDepChecker::DepCandidates &DepCands;

  /// Initial processing of memory accesses determined that we may need
  /// to add memchecks. Perform the analysis to determine the necessary checks.
  ///
  /// Note that, this is different from isDependencyCheckNeeded. When we retry
  /// memcheck analysis without dependency checking
  /// (i.e. FoundNonConstantDistanceDependence), isDependencyCheckNeeded is
  /// cleared while this remains set if we have potentially dependent accesses.
  bool IsRTCheckAnalysisNeeded;

  /// The SCEV predicate containing all the SCEV-related assumptions.
  PredicatedScalarEvolution &PSE;
};

} // end anonymous namespace

/// Check whether a pointer can participate in a runtime bounds check.
/// If \p Assume, try harder to prove that we can compute the bounds of \p Ptr
/// by adding run-time checks (overflow checks) if necessary.
static bool hasComputableBounds(PredicatedScalarEvolution &PSE,
                                const ValueToValueMap &Strides, Value *Ptr,
                                Loop *L, bool Assume) {
  const SCEV *PtrScev = replaceSymbolicStrideSCEV(PSE, Strides, Ptr);

  // The bounds for a loop-invariant pointer are trivial.
  if (PSE.getSE()->isLoopInvariant(PtrScev, L))
    return true;

  const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(PtrScev);

  if (!AR && Assume)
    AR = PSE.getAsAddRec(Ptr);

  if (!AR)
    return false;

  return AR->isAffine();
}

/// Check whether a pointer address cannot wrap.
static bool isNoWrap(PredicatedScalarEvolution &PSE,
                     const ValueToValueMap &Strides, Value *Ptr, Loop *L) {
  const SCEV *PtrScev = PSE.getSCEV(Ptr);
  if (PSE.getSE()->isLoopInvariant(PtrScev, L))
    return true;

  int64_t Stride = getPtrStride(PSE, Ptr, L, Strides);
  if (Stride == 1 || PSE.hasNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW))
    return true;

  return false;
}
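
// E.g. (hypothetical loops, for exposition only): for "A[i]" the pointer has
// unit stride, so isNoWrap returns true with no extra predicate; for
// "A[i * m]" with an unknown m it returns true only if PSE already carries an
// IncrementNUSW no-overflow predicate for the pointer, and otherwise the
// caller may add one when it is allowed to assume.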

bool AccessAnalysis::createCheckForAccess(RuntimePointerChecking &RtCheck,
                                          MemAccessInfo Access,
                                          const ValueToValueMap &StridesMap,
                                          DenseMap<Value *, unsigned> &DepSetId,
                                          Loop *TheLoop, unsigned &RunningDepId,
                                          unsigned ASId, bool ShouldCheckWrap,
                                          bool Assume) {
  Value *Ptr = Access.getPointer();

  if (!hasComputableBounds(PSE, StridesMap, Ptr, TheLoop, Assume))
    return false;

  // When we run after a failing dependency check we have to make sure
  // we don't have wrapping pointers.
  if (ShouldCheckWrap && !isNoWrap(PSE, StridesMap, Ptr, TheLoop)) {
    auto *Expr = PSE.getSCEV(Ptr);
    if (!Assume || !isa<SCEVAddRecExpr>(Expr))
      return false;
    PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
  }

  // The id of the dependence set.
  unsigned DepId;

  if (isDependencyCheckNeeded()) {
    Value *Leader = DepCands.getLeaderValue(Access).getPointer();
    unsigned &LeaderId = DepSetId[Leader];
    if (!LeaderId)
      LeaderId = RunningDepId++;
    DepId = LeaderId;
  } else
    // Each access has its own dependence set.
    DepId = RunningDepId++;

  bool IsWrite = Access.getInt();
  RtCheck.insert(TheLoop, Ptr, IsWrite, DepId, ASId, StridesMap, PSE);
  LLVM_DEBUG(dbgs() << "LAA: Found a runtime check ptr:" << *Ptr << '\n');

  return true;
}
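
// Sketch of the two-pass use of this function (hypothetical): on the first
// pass Assume is false, so a pointer whose SCEV is not an affine AddRec fails
// hasComputableBounds and its access is queued for retry; on the retry pass
// Assume is true, PSE.getAsAddRec may rewrite the pointer under an added
// predicate, and the extra overflow checks become part of the runtime guard.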

bool AccessAnalysis::canCheckPtrAtRT(RuntimePointerChecking &RtCheck,
                                     ScalarEvolution *SE, Loop *TheLoop,
                                     const ValueToValueMap &StridesMap,
                                     bool ShouldCheckWrap) {
  // Find pointers with computable bounds. We are going to use this information
  // to place a runtime bound check.
  bool CanDoRT = true;

  bool NeedRTCheck = false;
  if (!IsRTCheckAnalysisNeeded) return true;

  bool IsDepCheckNeeded = isDependencyCheckNeeded();

  // We assign a consecutive id to accesses from different alias sets.
  // Accesses between different groups don't need to be checked.
  unsigned ASId = 1;
  for (auto &AS : AST) {
    int NumReadPtrChecks = 0;
    int NumWritePtrChecks = 0;
    bool CanDoAliasSetRT = true;

    // We assign consecutive ids to accesses from different dependence sets.
    // Accesses within the same set don't need a runtime check.
    unsigned RunningDepId = 1;
    DenseMap<Value *, unsigned> DepSetId;

    SmallVector<MemAccessInfo, 4> Retries;

    for (auto A : AS) {
      Value *Ptr = A.getValue();
      bool IsWrite = Accesses.count(MemAccessInfo(Ptr, true));
      MemAccessInfo Access(Ptr, IsWrite);

      if (IsWrite)
        ++NumWritePtrChecks;
      else
        ++NumReadPtrChecks;

      if (!createCheckForAccess(RtCheck, Access, StridesMap, DepSetId, TheLoop,
                                RunningDepId, ASId, ShouldCheckWrap, false)) {
        LLVM_DEBUG(dbgs() << "LAA: Can't find bounds for ptr:" << *Ptr << '\n');
        Retries.push_back(Access);
        CanDoAliasSetRT = false;
      }
    }

    // If we have at least two writes or one write and a read then we need to
    // check them. But there is no need for checks if there is only one
    // dependence set for this alias set.
    //
    // Note that this function computes CanDoRT and NeedRTCheck independently.
    // For example CanDoRT=false, NeedRTCheck=false means that we have a pointer
    // for which we couldn't find the bounds but we don't actually need to emit
    // any checks so it does not matter.
    bool NeedsAliasSetRTCheck = false;
    if (!(IsDepCheckNeeded && CanDoAliasSetRT && RunningDepId == 2))
      NeedsAliasSetRTCheck = (NumWritePtrChecks >= 2 ||
                              (NumReadPtrChecks >= 1 && NumWritePtrChecks >= 1));

    // We need to perform run-time alias checks, but some pointers had bounds
    // that couldn't be checked.
    if (NeedsAliasSetRTCheck && !CanDoAliasSetRT) {
      // Reset the CanDoAliasSetRT flag and retry all accesses that have failed.
      // We know that we need these checks, so we can now be more aggressive
      // and add further checks if required (overflow checks).
      CanDoAliasSetRT = true;
      for (auto Access : Retries)
        if (!createCheckForAccess(RtCheck, Access, StridesMap, DepSetId,
                                  TheLoop, RunningDepId, ASId,
                                  ShouldCheckWrap, /*Assume=*/true)) {
          CanDoAliasSetRT = false;
          break;
        }
    }

    CanDoRT &= CanDoAliasSetRT;
    NeedRTCheck |= NeedsAliasSetRTCheck;
    ++ASId;
  }

  // If the pointers that we would use for the bounds comparison have different
  // address spaces, assume the values aren't directly comparable, so we can't
  // use them for the runtime check. We also have to assume they could
  // overlap. In the future there should be metadata for whether address spaces
  // are disjoint.
  unsigned NumPointers = RtCheck.Pointers.size();
  for (unsigned i = 0; i < NumPointers; ++i) {
    for (unsigned j = i + 1; j < NumPointers; ++j) {
      // Only need to check pointers between two different dependency sets.
      if (RtCheck.Pointers[i].DependencySetId ==
          RtCheck.Pointers[j].DependencySetId)
        continue;
      // Only need to check pointers in the same alias set.
      if (RtCheck.Pointers[i].AliasSetId != RtCheck.Pointers[j].AliasSetId)
        continue;

      Value *PtrI = RtCheck.Pointers[i].PointerValue;
      Value *PtrJ = RtCheck.Pointers[j].PointerValue;

      unsigned ASi = PtrI->getType()->getPointerAddressSpace();
      unsigned ASj = PtrJ->getType()->getPointerAddressSpace();
      if (ASi != ASj) {
        LLVM_DEBUG(
            dbgs() << "LAA: Runtime check would require comparison between"
                      " different address spaces\n");
        return false;
      }
    }
  }

  if (NeedRTCheck && CanDoRT)
    RtCheck.generateChecks(DepCands, IsDepCheckNeeded);

  LLVM_DEBUG(dbgs() << "LAA: We need to do " << RtCheck.getNumberOfChecks()
                    << " pointer comparisons.\n");

  RtCheck.Need = NeedRTCheck;

  bool CanDoRTIfNeeded = !NeedRTCheck || CanDoRT;
  if (!CanDoRTIfNeeded)
    RtCheck.reset();
  return CanDoRTIfNeeded;
}
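
// Putting the pieces together on a hypothetical loop:
//
//   // for (i = 0; i < n; ++i) A[i] = B[i] + C[i];
//
// If A, B and C land in distinct dependence sets of one alias set, the
// write/read pairs (A,B) and (A,C) need checks while B vs. C is read-only
// and is skipped; if bounds for some pointer can't be computed even on the
// Assume retry, CanDoRT goes false and, when checks are actually needed,
// the function resets RtCheck and reports failure.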
| 817 | |
void AccessAnalysis::processMemAccesses() {
  // We process the set twice: first we process read-write pointers, last we
  // process read-only pointers. This allows us to skip dependence tests for
  // read-only pointers.

  LLVM_DEBUG(dbgs() << "LAA: Processing memory accesses...\n");
  LLVM_DEBUG(dbgs() << " AST: "; AST.dump());
  LLVM_DEBUG(dbgs() << "LAA: Accesses(" << Accesses.size() << "):\n");
  LLVM_DEBUG({
    for (auto A : Accesses)
      dbgs() << "\t" << *A.getPointer() << " ("
             << (A.getInt() ? "write"
                            : (ReadOnlyPtr.count(A.getPointer()) ? "read-only"
                                                                 : "read"))
             << ")\n";
  });

  // The AliasSetTracker has nicely partitioned our pointers by metadata
  // compatibility and potential for underlying-object overlap. As a result, we
  // only need to check for potential pointer dependencies within each alias
  // set.
  for (auto &AS : AST) {
    // Note that both the alias-set tracker and the alias sets themselves use
    // linked lists internally and so the iteration order here is deterministic
    // (matching the original instruction order within each set).

    bool SetHasWrite = false;

    // Map of pointers to last access encountered.
    typedef DenseMap<const Value*, MemAccessInfo> UnderlyingObjToAccessMap;
    UnderlyingObjToAccessMap ObjToLastAccess;

    // Set of accesses to check after all writes have been processed.
    PtrAccessSet DeferredAccesses;

    // Iterate over each alias set twice, once to process read/write pointers,
    // and then to process read-only pointers.
    for (int SetIteration = 0; SetIteration < 2; ++SetIteration) {
      bool UseDeferred = SetIteration > 0;
      PtrAccessSet &S = UseDeferred ? DeferredAccesses : Accesses;

      for (auto AV : AS) {
        Value *Ptr = AV.getValue();

        // For a single memory access in AliasSetTracker, Accesses may contain
        // both read and write, and they both need to be handled for CheckDeps.
        for (auto AC : S) {
          if (AC.getPointer() != Ptr)
            continue;

          bool IsWrite = AC.getInt();

          // If we're using the deferred access set, then it contains only
          // reads.
          bool IsReadOnlyPtr = ReadOnlyPtr.count(Ptr) && !IsWrite;
          if (UseDeferred && !IsReadOnlyPtr)
            continue;
          // Otherwise, the pointer must be in the PtrAccessSet, either as a
          // read or a write.
          assert(((IsReadOnlyPtr && UseDeferred) || IsWrite ||
                  S.count(MemAccessInfo(Ptr, false))) &&
                 "Alias-set pointer not in the access set?");

          MemAccessInfo Access(Ptr, IsWrite);
          DepCands.insert(Access);

          // Memorize read-only pointers for later processing and skip them in
          // the first round (they need to be checked after we have seen all
          // write pointers). Note: we also mark pointers that are not
          // consecutive as "read-only" pointers (so that we check
          // "a[b[i]] +="). Hence, we need the second check for "!IsWrite".
          if (!UseDeferred && IsReadOnlyPtr) {
            DeferredAccesses.insert(Access);
            continue;
          }

          // If this is a write, check other reads and writes for conflicts. If
          // this is a read, only check other writes for conflicts (but only if
          // there is no other write to the ptr - this is an optimization to
          // catch "a[i] = a[i] + " without having to do a dependence check).
          if ((IsWrite || IsReadOnlyPtr) && SetHasWrite) {
            CheckDeps.push_back(Access);
            IsRTCheckAnalysisNeeded = true;
          }

          if (IsWrite)
            SetHasWrite = true;

          // Create sets of pointers connected by a shared alias set and
          // underlying object.
          typedef SmallVector<const Value *, 16> ValueVector;
          ValueVector TempObjects;

          GetUnderlyingObjects(Ptr, TempObjects, DL, LI);
          LLVM_DEBUG(dbgs()
                     << "Underlying objects for pointer " << *Ptr << "\n");
          for (const Value *UnderlyingObj : TempObjects) {
            // nullptr never aliases anything, so don't join sets for pointers
            // that have "null" in their UnderlyingObjects list.
            if (isa<ConstantPointerNull>(UnderlyingObj) &&
                !NullPointerIsDefined(
                    TheLoop->getHeader()->getParent(),
                    UnderlyingObj->getType()->getPointerAddressSpace()))
              continue;

            UnderlyingObjToAccessMap::iterator Prev =
                ObjToLastAccess.find(UnderlyingObj);
            if (Prev != ObjToLastAccess.end())
              DepCands.unionSets(Access, Prev->second);

            ObjToLastAccess[UnderlyingObj] = Access;
            LLVM_DEBUG(dbgs() << "  " << *UnderlyingObj << "\n");
          }
        }
      }
    }
  }
}
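
// For illustration (not part of the original source): if the loop writes
// A[i] through one pointer and reads A[i+1] through another, both pointers
// report the same underlying object A, so ObjToLastAccess links them and
// DepCands.unionSets places them in one dependence-candidate class; later
// dependence checks are confined to pointers related this way.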

static bool isInBoundsGep(Value *Ptr) {
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
    return GEP->isInBounds();
  return false;
}

/// Return true if an AddRec pointer \p Ptr is unsigned non-wrapping,
/// i.e. monotonically increasing/decreasing.
static bool isNoWrapAddRec(Value *Ptr, const SCEVAddRecExpr *AR,
                           PredicatedScalarEvolution &PSE, const Loop *L) {
  // FIXME: This should probably only return true for NUW.
  if (AR->getNoWrapFlags(SCEV::NoWrapMask))
    return true;

  // Scalar evolution does not propagate the non-wrapping flags to values that
  // are derived from a non-wrapping induction variable because non-wrapping
  // could be flow-sensitive.
  //
  // Look through the potentially overflowing instruction to try to prove
  // non-wrapping for the *specific* value of Ptr.

  // The arithmetic implied by an inbounds GEP can't overflow.
  auto *GEP = dyn_cast<GetElementPtrInst>(Ptr);
  if (!GEP || !GEP->isInBounds())
    return false;

  // Make sure there is only one non-const index and analyze that.
  Value *NonConstIndex = nullptr;
  for (Value *Index : make_range(GEP->idx_begin(), GEP->idx_end()))
    if (!isa<ConstantInt>(Index)) {
      if (NonConstIndex)
        return false;
      NonConstIndex = Index;
    }
  if (!NonConstIndex)
    // The recurrence is on the pointer, ignore for now.
    return false;

  // The index in GEP is signed. It is non-wrapping if it's derived from a NSW
  // AddRec using a NSW operation.
  if (auto *OBO = dyn_cast<OverflowingBinaryOperator>(NonConstIndex))
    if (OBO->hasNoSignedWrap() &&
        // Assume constant for the other operand so that the AddRec can be
        // easily found.
        isa<ConstantInt>(OBO->getOperand(1))) {
      auto *OpScev = PSE.getSCEV(OBO->getOperand(0));

      if (auto *OpAR = dyn_cast<SCEVAddRecExpr>(OpScev))
        return OpAR->getLoop() == L && OpAR->getNoWrapFlags(SCEV::FlagNSW);
    }

  return false;
}
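
// For illustration (not part of the original source), the pattern accepted
// above corresponds to IR along the lines of:
//
//   %add = add nsw i64 %iv, 1            ; NSW op on an NSW AddRec {0,+,1}
//   %gep = getelementptr inbounds i32, i32* %A, i64 %add
//
// where %add is the single non-constant GEP index, its second operand is a
// ConstantInt, and its first operand is an AddRec of this loop with FlagNSW.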

/// Check whether the access through \p Ptr has a constant stride.
int64_t llvm::getPtrStride(PredicatedScalarEvolution &PSE, Value *Ptr,
                           const Loop *Lp, const ValueToValueMap &StridesMap,
                           bool Assume, bool ShouldCheckWrap) {
  Type *Ty = Ptr->getType();
  assert(Ty->isPointerTy() && "Unexpected non-ptr");

  // Make sure that the pointer does not point to aggregate types.
  auto *PtrTy = cast<PointerType>(Ty);
  if (PtrTy->getElementType()->isAggregateType()) {
    LLVM_DEBUG(dbgs() << "LAA: Bad stride - Not a pointer to a scalar type "
                      << *Ptr << "\n");
    return 0;
  }

  const SCEV *PtrScev = replaceSymbolicStrideSCEV(PSE, StridesMap, Ptr);

  const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(PtrScev);
  if (Assume && !AR)
    AR = PSE.getAsAddRec(Ptr);

  if (!AR) {
    LLVM_DEBUG(dbgs() << "LAA: Bad stride - Not an AddRecExpr pointer " << *Ptr
                      << " SCEV: " << *PtrScev << "\n");
    return 0;
  }

  // The access function must stride over the innermost loop.
  if (Lp != AR->getLoop()) {
    LLVM_DEBUG(dbgs() << "LAA: Bad stride - Not striding over innermost loop "
                      << *Ptr << " SCEV: " << *AR << "\n");
    return 0;
  }

  // The address calculation must not wrap. Otherwise, a dependence could be
  // inverted.
  // An inbounds getelementptr that is an AddRec with a unit stride
  // cannot wrap per definition. The unit stride requirement is checked later.
  // A getelementptr without an inbounds attribute and unit stride would have
  // to access the pointer value "0" which is undefined behavior in address
  // space 0, therefore we can also vectorize this case.
  bool IsInBoundsGEP = isInBoundsGep(Ptr);
  bool IsNoWrapAddRec = !ShouldCheckWrap ||
    PSE.hasNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW) ||
    isNoWrapAddRec(Ptr, AR, PSE, Lp);
  if (!IsNoWrapAddRec && !IsInBoundsGEP &&
      NullPointerIsDefined(Lp->getHeader()->getParent(),
                           PtrTy->getAddressSpace())) {
    if (Assume) {
      PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
      IsNoWrapAddRec = true;
      LLVM_DEBUG(dbgs() << "LAA: Pointer may wrap in the address space:\n"
                        << "LAA: Pointer: " << *Ptr << "\n"
                        << "LAA: SCEV: " << *AR << "\n"
                        << "LAA: Added an overflow assumption\n");
    } else {
      LLVM_DEBUG(
          dbgs() << "LAA: Bad stride - Pointer may wrap in the address space "
                 << *Ptr << " SCEV: " << *AR << "\n");
      return 0;
    }
  }

  // Check the step is constant.
  const SCEV *Step = AR->getStepRecurrence(*PSE.getSE());

  // Calculate the pointer stride and check if it is constant.
  const SCEVConstant *C = dyn_cast<SCEVConstant>(Step);
  if (!C) {
    LLVM_DEBUG(dbgs() << "LAA: Bad stride - Not a constant strided " << *Ptr
                      << " SCEV: " << *AR << "\n");
    return 0;
  }

  auto &DL = Lp->getHeader()->getModule()->getDataLayout();
  int64_t Size = DL.getTypeAllocSize(PtrTy->getElementType());
  const APInt &APStepVal = C->getAPInt();

  // Huge step value - give up.
  if (APStepVal.getBitWidth() > 64)
    return 0;

  int64_t StepVal = APStepVal.getSExtValue();

  // Strided access.
  int64_t Stride = StepVal / Size;
  int64_t Rem = StepVal % Size;
  if (Rem)
    return 0;

  // If the SCEV could wrap but we have an inbounds gep with a unit stride we
  // know we can't "wrap around the address space". In case of address space
  // zero we know that this won't happen without triggering undefined behavior.
  if (!IsNoWrapAddRec && Stride != 1 && Stride != -1 &&
      (IsInBoundsGEP || !NullPointerIsDefined(Lp->getHeader()->getParent(),
                                              PtrTy->getAddressSpace()))) {
    if (Assume) {
      // We can avoid this case by adding a run-time check.
      LLVM_DEBUG(dbgs() << "LAA: Non unit strided pointer which is not either "
                        << "inbounds or in address space 0 may wrap:\n"
                        << "LAA: Pointer: " << *Ptr << "\n"
                        << "LAA: SCEV: " << *AR << "\n"
                        << "LAA: Added an overflow assumption\n");
      PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
    } else
      return 0;
  }

  return Stride;
}
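
// For illustration (not part of the original source): for a loop accessing
// A[3*i] with i32 elements, the AddRec step is 12 bytes and the element size
// is 4, so StepVal / Size yields a stride of 3 with zero remainder. A step of
// 10 bytes over i32 elements would leave Rem == 2 and the function would
// report an unknown (zero) stride instead.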

bool llvm::sortPtrAccesses(ArrayRef<Value *> VL, const DataLayout &DL,
                           ScalarEvolution &SE,
                           SmallVectorImpl<unsigned> &SortedIndices) {
  assert(llvm::all_of(
             VL, [](const Value *V) { return V->getType()->isPointerTy(); }) &&
         "Expected list of pointer operands.");
  SmallVector<std::pair<int64_t, Value *>, 4> OffValPairs;
  OffValPairs.reserve(VL.size());

  // Walk over the pointers, and map each of them to an offset relative to the
  // first pointer in the array.
  Value *Ptr0 = VL[0];
  const SCEV *Scev0 = SE.getSCEV(Ptr0);
  Value *Obj0 = GetUnderlyingObject(Ptr0, DL);

  llvm::SmallSet<int64_t, 4> Offsets;
  for (auto *Ptr : VL) {
    // TODO: Outline this code as a special, more time consuming, version of
    // computeConstantDifference() function.
    if (Ptr->getType()->getPointerAddressSpace() !=
        Ptr0->getType()->getPointerAddressSpace())
      return false;
    // If a pointer refers to a different underlying object, bail - the
    // pointers are by definition incomparable.
    Value *CurrObj = GetUnderlyingObject(Ptr, DL);
    if (CurrObj != Obj0)
      return false;

    const SCEV *Scev = SE.getSCEV(Ptr);
    const auto *Diff = dyn_cast<SCEVConstant>(SE.getMinusSCEV(Scev, Scev0));
    // The pointers may not have a constant offset from each other, or SCEV
    // may just not be smart enough to figure out they do. Regardless,
    // there's nothing we can do.
    if (!Diff)
      return false;

    // Bail if a pointer with the same offset has already been seen.
    int64_t Offset = Diff->getAPInt().getSExtValue();
    if (!Offsets.insert(Offset).second)
      return false;
    OffValPairs.emplace_back(Offset, Ptr);
  }
  SortedIndices.clear();
  SortedIndices.resize(VL.size());
  std::iota(SortedIndices.begin(), SortedIndices.end(), 0);

  // Sort the indices by the offset of the corresponding memory access.
  llvm::stable_sort(SortedIndices, [&](unsigned Left, unsigned Right) {
    return OffValPairs[Left].first < OffValPairs[Right].first;
  });

  // Check if the order is consecutive already.
  if (llvm::all_of(SortedIndices, [&SortedIndices](const unsigned I) {
        return I == SortedIndices[I];
      }))
    SortedIndices.clear();

  return true;
}
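
// For illustration (not part of the original source): for VL = {&A[2], &A[0],
// &A[1]} with i32 elements, the offsets relative to VL[0] are {0, -8, -4}, so
// the sort produces SortedIndices = {1, 2, 0}. For an already-consecutive
// VL = {&A[0], &A[1], &A[2]} the identity permutation is detected and
// SortedIndices is cleared.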

/// Take the address space operand from the Load/Store instruction.
/// Returns -1 if this is not a valid Load/Store instruction.
static unsigned getAddressSpaceOperand(Value *I) {
  if (LoadInst *L = dyn_cast<LoadInst>(I))
    return L->getPointerAddressSpace();
  if (StoreInst *S = dyn_cast<StoreInst>(I))
    return S->getPointerAddressSpace();
  return -1;
}

/// Returns true if the memory operations \p A and \p B are consecutive.
bool llvm::isConsecutiveAccess(Value *A, Value *B, const DataLayout &DL,
                               ScalarEvolution &SE, bool CheckType) {
  Value *PtrA = getLoadStorePointerOperand(A);
  Value *PtrB = getLoadStorePointerOperand(B);
  unsigned ASA = getAddressSpaceOperand(A);
  unsigned ASB = getAddressSpaceOperand(B);

  // Check that the address spaces match and that the pointers are valid.
  if (!PtrA || !PtrB || (ASA != ASB))
    return false;

  // Make sure that A and B are different pointers.
  if (PtrA == PtrB)
    return false;

  // Make sure that A and B have the same type if required.
  if (CheckType && PtrA->getType() != PtrB->getType())
    return false;

  unsigned IdxWidth = DL.getIndexSizeInBits(ASA);
  Type *Ty = cast<PointerType>(PtrA->getType())->getElementType();

  APInt OffsetA(IdxWidth, 0), OffsetB(IdxWidth, 0);
  PtrA = PtrA->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetA);
  PtrB = PtrB->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetB);

  // Retrieve the address space again as pointer stripping now tracks through
  // `addrspacecast`.
  ASA = cast<PointerType>(PtrA->getType())->getAddressSpace();
  ASB = cast<PointerType>(PtrB->getType())->getAddressSpace();
  // Check that the address spaces match and that the pointers are valid.
  if (ASA != ASB)
    return false;

  IdxWidth = DL.getIndexSizeInBits(ASA);
  OffsetA = OffsetA.sextOrTrunc(IdxWidth);
  OffsetB = OffsetB.sextOrTrunc(IdxWidth);

  APInt Size(IdxWidth, DL.getTypeStoreSize(Ty));

  // OffsetDelta = OffsetB - OffsetA;
  const SCEV *OffsetSCEVA = SE.getConstant(OffsetA);
  const SCEV *OffsetSCEVB = SE.getConstant(OffsetB);
  const SCEV *OffsetDeltaSCEV = SE.getMinusSCEV(OffsetSCEVB, OffsetSCEVA);
  // The difference of two SCEV constants folds to a constant, so the cast
  // below cannot fail.
  const SCEVConstant *OffsetDeltaC = dyn_cast<SCEVConstant>(OffsetDeltaSCEV);
  const APInt &OffsetDelta = OffsetDeltaC->getAPInt();
  // Check if they are based on the same pointer. That makes the offsets
  // sufficient.
  if (PtrA == PtrB)
    return OffsetDelta == Size;

  // Compute the necessary base pointer delta to have the necessary final delta
  // equal to the size.
  // BaseDelta = Size - OffsetDelta;
  const SCEV *SizeSCEV = SE.getConstant(Size);
  const SCEV *BaseDelta = SE.getMinusSCEV(SizeSCEV, OffsetDeltaSCEV);

  // Otherwise compute the distance with SCEV between the base pointers.
  const SCEV *PtrSCEVA = SE.getSCEV(PtrA);
  const SCEV *PtrSCEVB = SE.getSCEV(PtrB);
  const SCEV *X = SE.getAddExpr(PtrSCEVA, BaseDelta);
  return X == PtrSCEVB;
}
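
// For illustration (not part of the original source): given two i32 loads
// from %p and from getelementptr inbounds (i32, i32* %p, i64 1), stripping
// constant offsets leaves both bases equal to %p with OffsetDelta = 4, which
// matches getTypeStoreSize(i32), so the accesses are reported consecutive.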

MemoryDepChecker::VectorizationSafetyStatus
MemoryDepChecker::Dependence::isSafeForVectorization(DepType Type) {
  switch (Type) {
  case NoDep:
  case Forward:
  case BackwardVectorizable:
    return VectorizationSafetyStatus::Safe;

  case Unknown:
    return VectorizationSafetyStatus::PossiblySafeWithRtChecks;
  case ForwardButPreventsForwarding:
  case Backward:
  case BackwardVectorizableButPreventsForwarding:
    return VectorizationSafetyStatus::Unsafe;
  }
  llvm_unreachable("unexpected DepType!");
}

bool MemoryDepChecker::Dependence::isBackward() const {
  switch (Type) {
  case NoDep:
  case Forward:
  case ForwardButPreventsForwarding:
  case Unknown:
    return false;

  case BackwardVectorizable:
  case Backward:
  case BackwardVectorizableButPreventsForwarding:
    return true;
  }
  llvm_unreachable("unexpected DepType!");
}

bool MemoryDepChecker::Dependence::isPossiblyBackward() const {
  return isBackward() || Type == Unknown;
}

bool MemoryDepChecker::Dependence::isForward() const {
  switch (Type) {
  case Forward:
  case ForwardButPreventsForwarding:
    return true;

  case NoDep:
  case Unknown:
  case BackwardVectorizable:
  case Backward:
  case BackwardVectorizableButPreventsForwarding:
    return false;
  }
  llvm_unreachable("unexpected DepType!");
}

bool MemoryDepChecker::couldPreventStoreLoadForward(uint64_t Distance,
                                                    uint64_t TypeByteSize) {
  // If loads occur at a distance that is not a multiple of a feasible vector
  // factor, store-load forwarding does not take place.
  // Positive dependences might cause trouble because vectorizing them might
  // prevent store-load forwarding, making vectorized code run a lot slower.
  //   a[i] = a[i-3] ^ a[i-8];
  // The stores to a[i:i+1] don't align with the loads from a[i-3:i-2] and
  // hence on your typical architecture store-load forwarding does not take
  // place. Vectorizing in such cases does not make sense.

  // After this many iterations store-to-load forwarding conflicts should not
  // cause any slowdowns.
  const uint64_t NumItersForStoreLoadThroughMemory = 8 * TypeByteSize;
  // Maximum vector factor.
  uint64_t MaxVFWithoutSLForwardIssues = std::min(
      VectorizerParams::MaxVectorWidth * TypeByteSize, MaxSafeDepDistBytes);

  // Compute the smallest VF at which the store and load would be misaligned.
  for (uint64_t VF = 2 * TypeByteSize; VF <= MaxVFWithoutSLForwardIssues;
       VF *= 2) {
    // If the number of vector iterations between the store and the load is
    // small we could incur conflicts.
    if (Distance % VF && Distance / VF < NumItersForStoreLoadThroughMemory) {
      MaxVFWithoutSLForwardIssues = (VF >>= 1);
      break;
    }
  }

  if (MaxVFWithoutSLForwardIssues < 2 * TypeByteSize) {
    LLVM_DEBUG(
        dbgs() << "LAA: Distance " << Distance
               << " that could cause a store-load forwarding conflict\n");
    return true;
  }

  if (MaxVFWithoutSLForwardIssues < MaxSafeDepDistBytes &&
      MaxVFWithoutSLForwardIssues !=
          VectorizerParams::MaxVectorWidth * TypeByteSize)
    MaxSafeDepDistBytes = MaxVFWithoutSLForwardIssues;
  return false;
}
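
// For illustration (not part of the original source): with Distance = 12
// bytes (e.g. a[i-3] with 4-byte ints) and TypeByteSize = 4, the threshold is
// 8 * 4 = 32 iterations. At VF = 8 bytes, 12 % 8 != 0 and 12 / 8 = 1 < 32, so
// MaxVFWithoutSLForwardIssues drops to 4, which is below 2 * TypeByteSize and
// the function reports a forwarding conflict.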

void MemoryDepChecker::mergeInStatus(VectorizationSafetyStatus S) {
  if (Status < S)
    Status = S;
}

/// Given a non-constant (unknown) dependence-distance \p Dist between two
/// memory accesses, that have the same stride whose absolute value is given
/// in \p Stride, and that have the same type size \p TypeByteSize,
/// in a loop whose backedge-taken count is \p BackedgeTakenCount, check if it
/// is possible to prove statically that the dependence distance is larger
/// than the range that the accesses will travel through the execution of
/// the loop. If so, return true; false otherwise. This is useful for
/// example in loops such as the following (PR31098):
///   for (i = 0; i < D; ++i) {
///     = out[i];
///     out[i+D] =
///   }
static bool isSafeDependenceDistance(const DataLayout &DL, ScalarEvolution &SE,
                                     const SCEV &BackedgeTakenCount,
                                     const SCEV &Dist, uint64_t Stride,
                                     uint64_t TypeByteSize) {

  // If we can prove that
  //     (**) |Dist| > BackedgeTakenCount * Step
  // where Step is the absolute stride of the memory accesses in bytes,
  // then there is no dependence.
  //
  // Rationale:
  // We basically want to check if the absolute distance (|Dist/Step|)
  // is >= the loop iteration count (or > BackedgeTakenCount).
  // This is equivalent to the Strong SIV Test (Practical Dependence Testing,
  // Section 4.2.1); Note, that for vectorization it is sufficient to prove
  // that the dependence distance is >= VF; This is checked elsewhere.
  // But in some cases we can prune unknown dependence distances early, and
  // even before selecting the VF, and without a runtime test, by comparing
  // the distance against the loop iteration count. Since the vectorized code
  // will be executed only if LoopCount >= VF, proving distance >= LoopCount
  // also guarantees that distance >= VF.
  //
  const uint64_t ByteStride = Stride * TypeByteSize;
  const SCEV *Step = SE.getConstant(BackedgeTakenCount.getType(), ByteStride);
  const SCEV *Product = SE.getMulExpr(&BackedgeTakenCount, Step);

  const SCEV *CastedDist = &Dist;
  const SCEV *CastedProduct = Product;
  uint64_t DistTypeSize = DL.getTypeAllocSize(Dist.getType());
  uint64_t ProductTypeSize = DL.getTypeAllocSize(Product->getType());

  // The dependence distance can be positive/negative, so we sign extend Dist;
  // The multiplication of the absolute stride in bytes and the
  // backedgeTakenCount is non-negative, so we zero extend Product.
  if (DistTypeSize > ProductTypeSize)
    CastedProduct = SE.getZeroExtendExpr(Product, Dist.getType());
  else
    CastedDist = SE.getNoopOrSignExtend(&Dist, Product->getType());

  // Is Dist - (BackedgeTakenCount * Step) > 0 ?
  // (If so, then we have proven (**) because |Dist| >= Dist)
  const SCEV *Minus = SE.getMinusSCEV(CastedDist, CastedProduct);
  if (SE.isKnownPositive(Minus))
    return true;

  // Second try: Is -Dist - (BackedgeTakenCount * Step) > 0 ?
  // (If so, then we have proven (**) because |Dist| >= -1*Dist)
  const SCEV *NegDist = SE.getNegativeSCEV(CastedDist);
  Minus = SE.getMinusSCEV(NegDist, CastedProduct);
  if (SE.isKnownPositive(Minus))
    return true;

  return false;
}
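
// For illustration (not part of the original source): in the PR31098-style
// loop above with 4-byte elements and unit stride, Dist is 4*D bytes while
// BackedgeTakenCount * Step is (D-1)*4 bytes; when SCEV can relate the two,
// Dist - Product simplifies to 4 > 0 and the unknown-distance dependence is
// discarded without a runtime check.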

/// Check the dependence for two accesses with the same stride \p Stride.
/// \p Distance is the positive distance and \p TypeByteSize is type size in
/// bytes.
///
/// \returns true if they are independent.
static bool areStridedAccessesIndependent(uint64_t Distance, uint64_t Stride,
                                          uint64_t TypeByteSize) {
  assert(Stride > 1 && "The stride must be greater than 1");
  assert(TypeByteSize > 0 && "The type size in bytes must be non-zero");
  assert(Distance > 0 && "The distance must be positive");

  // Skip if the distance is not a multiple of the type byte size.
  if (Distance % TypeByteSize)
    return false;

  uint64_t ScaledDist = Distance / TypeByteSize;

  // No dependence if the scaled distance is not a multiple of the stride.
  // E.g.
  //      for (i = 0; i < 1024 ; i += 4)
  //        A[i+2] = A[i] + 1;
  //
  // Two accesses in memory (scaled distance is 2, stride is 4):
  //     | A[0] |      |      |      | A[4] |      |      |      |
  //     |      |      | A[2] |      |      |      | A[6] |      |
  //
  // E.g.
  //      for (i = 0; i < 1024 ; i += 3)
  //        A[i+4] = A[i] + 1;
  //
  // Two accesses in memory (scaled distance is 4, stride is 3):
  //     | A[0] |      |      | A[3] |      |      | A[6] |      |      |
  //     |      |      |      |      | A[4] |      |      | A[7] |      |
  return ScaledDist % Stride;
}

MemoryDepChecker::Dependence::DepType
MemoryDepChecker::isDependent(const MemAccessInfo &A, unsigned AIdx,
                              const MemAccessInfo &B, unsigned BIdx,
                              const ValueToValueMap &Strides) {
  assert(AIdx < BIdx && "Must pass arguments in program order");

  Value *APtr = A.getPointer();
  Value *BPtr = B.getPointer();
  bool AIsWrite = A.getInt();
  bool BIsWrite = B.getInt();

  // Two reads are independent.
  if (!AIsWrite && !BIsWrite)
    return Dependence::NoDep;

  // We cannot check pointers in different address spaces.
  if (APtr->getType()->getPointerAddressSpace() !=
      BPtr->getType()->getPointerAddressSpace())
    return Dependence::Unknown;

  int64_t StrideAPtr = getPtrStride(PSE, APtr, InnermostLoop, Strides, true);
  int64_t StrideBPtr = getPtrStride(PSE, BPtr, InnermostLoop, Strides, true);

  const SCEV *Src = PSE.getSCEV(APtr);
  const SCEV *Sink = PSE.getSCEV(BPtr);

  // If the induction step is negative we have to invert source and sink of the
  // dependence.
  if (StrideAPtr < 0) {
    std::swap(APtr, BPtr);
    std::swap(Src, Sink);
    std::swap(AIsWrite, BIsWrite);
    std::swap(AIdx, BIdx);
    std::swap(StrideAPtr, StrideBPtr);
  }

  const SCEV *Dist = PSE.getSE()->getMinusSCEV(Sink, Src);

  LLVM_DEBUG(dbgs() << "LAA: Src Scev: " << *Src << "Sink Scev: " << *Sink
                    << "(Induction step: " << StrideAPtr << ")\n");
  LLVM_DEBUG(dbgs() << "LAA: Distance for " << *InstMap[AIdx] << " to "
                    << *InstMap[BIdx] << ": " << *Dist << "\n");

  // Need accesses with constant stride. We don't want to vectorize
  // "A[B[i]] += ..." and similar code or pointer arithmetic that could wrap in
  // the address space.
  if (!StrideAPtr || !StrideBPtr || StrideAPtr != StrideBPtr) {
    LLVM_DEBUG(dbgs() << "Pointer access with non-constant stride\n");
    return Dependence::Unknown;
  }

  Type *ATy = APtr->getType()->getPointerElementType();
  Type *BTy = BPtr->getType()->getPointerElementType();
  auto &DL = InnermostLoop->getHeader()->getModule()->getDataLayout();
  uint64_t TypeByteSize = DL.getTypeAllocSize(ATy);
  uint64_t Stride = std::abs(StrideAPtr);
  const SCEVConstant *C = dyn_cast<SCEVConstant>(Dist);
  if (!C) {
    if (TypeByteSize == DL.getTypeAllocSize(BTy) &&
        isSafeDependenceDistance(DL, *(PSE.getSE()),
                                 *(PSE.getBackedgeTakenCount()), *Dist, Stride,
                                 TypeByteSize))
      return Dependence::NoDep;

    LLVM_DEBUG(dbgs() << "LAA: Dependence because of non-constant distance\n");
    FoundNonConstantDistanceDependence = true;
    return Dependence::Unknown;
  }

  const APInt &Val = C->getAPInt();
  int64_t Distance = Val.getSExtValue();

  // Attempt to prove strided accesses independent.
  if (std::abs(Distance) > 0 && Stride > 1 && ATy == BTy &&
      areStridedAccessesIndependent(std::abs(Distance), Stride, TypeByteSize)) {
    LLVM_DEBUG(dbgs() << "LAA: Strided accesses are independent\n");
    return Dependence::NoDep;
  }

  // Negative distances are not plausible dependencies.
  if (Val.isNegative()) {
    bool IsTrueDataDependence = (AIsWrite && !BIsWrite);
    if (IsTrueDataDependence && EnableForwardingConflictDetection &&
        (couldPreventStoreLoadForward(Val.abs().getZExtValue(), TypeByteSize) ||
         ATy != BTy)) {
      LLVM_DEBUG(dbgs() << "LAA: Forward but may prevent st->ld forwarding\n");
      return Dependence::ForwardButPreventsForwarding;
    }

    LLVM_DEBUG(dbgs() << "LAA: Dependence is negative\n");
    return Dependence::Forward;
  }

  // Write to the same location with the same size.
  // Could be improved to assert type sizes are the same (i32 == float, etc).
  if (Val == 0) {
    if (ATy == BTy)
      return Dependence::Forward;
    LLVM_DEBUG(
        dbgs() << "LAA: Zero dependence difference but different types\n");
    return Dependence::Unknown;
  }

  assert(Val.isStrictlyPositive() && "Expect a positive value");

  if (ATy != BTy) {
    LLVM_DEBUG(
        dbgs()
        << "LAA: ReadWrite-Write positive dependency with different types\n");
    return Dependence::Unknown;
  }

  // Bail out early if passed-in parameters make vectorization not feasible.
  unsigned ForcedFactor = (VectorizerParams::VectorizationFactor ?
                           VectorizerParams::VectorizationFactor : 1);
  unsigned ForcedUnroll = (VectorizerParams::VectorizationInterleave ?
                           VectorizerParams::VectorizationInterleave : 1);
  // The minimum number of iterations for a vectorized/unrolled version.
  unsigned MinNumIter = std::max(ForcedFactor * ForcedUnroll, 2U);

  // It's not vectorizable if the distance is smaller than the minimum distance
  // needed for a vectorized/unrolled version. Vectorizing one iteration in
  // front needs TypeByteSize * Stride. Vectorizing the last iteration needs
  // TypeByteSize (no need to add the last gap distance).
  //
  // E.g. Assume one char is 1 byte in memory and one int is 4 bytes.
  //      foo(int *A) {
  //        int *B = (int *)((char *)A + 14);
  //        for (i = 0 ; i < 1024 ; i += 2)
  //          B[i] = A[i] + 1;
  //      }
  //
  // Two accesses in memory (stride is 2):
  //     | A[0] |      | A[2] |      | A[4] |      | A[6] |      |
  //                              | B[0] |      | B[2] |      | B[4] |
  //
  // Distance needed for vectorizing iterations except the last iteration:
  // 4 * 2 * (MinNumIter - 1). Distance needed for the last iteration: 4.
  // So the minimum distance needed is: 4 * 2 * (MinNumIter - 1) + 4.
  //
  // If MinNumIter is 2, it is vectorizable as the minimum distance needed is
  // 12, which is less than distance.
  //
  // If MinNumIter is 4 (Say if a user forces the vectorization factor to be 4),
  // the minimum distance needed is 28, which is greater than distance. It is
  // not safe to do vectorization.
  uint64_t MinDistanceNeeded =
      TypeByteSize * Stride * (MinNumIter - 1) + TypeByteSize;
  if (MinDistanceNeeded > static_cast<uint64_t>(Distance)) {
    LLVM_DEBUG(dbgs() << "LAA: Failure because of positive distance "
                      << Distance << '\n');
    return Dependence::Backward;
  }

  // Unsafe if the minimum distance needed is greater than max safe distance.
  if (MinDistanceNeeded > MaxSafeDepDistBytes) {
    LLVM_DEBUG(dbgs() << "LAA: Failure because it needs at least "
                      << MinDistanceNeeded << " size in bytes");
    return Dependence::Backward;
  }

  // Positive distance bigger than max vectorization factor.
  // FIXME: Should use max factor instead of max distance in bytes, which could
  // not handle different types.
  // E.g. Assume one char is 1 byte in memory and one int is 4 bytes.
  //      void foo (int *A, char *B) {
  //        for (unsigned i = 0; i < 1024; i++) {
  //          A[i+2] = A[i] + 1;
  //          B[i+2] = B[i] + 1;
  //        }
  //      }
  //
  // This case is currently unsafe according to the max safe distance. If we
  // analyze the two accesses on array B, the max safe dependence distance
  // is 2. Then we analyze the accesses on array A, for which the minimum
  // distance needed is 8; since 8 is greater than the max safe distance of 2,
  // vectorization is forbidden. But actually both A and B could be vectorized
  // by 2 iterations.
  MaxSafeDepDistBytes =
      std::min(static_cast<uint64_t>(Distance), MaxSafeDepDistBytes);

  bool IsTrueDataDependence = (!AIsWrite && BIsWrite);
  if (IsTrueDataDependence && EnableForwardingConflictDetection &&
      couldPreventStoreLoadForward(Distance, TypeByteSize))
    return Dependence::BackwardVectorizableButPreventsForwarding;

  uint64_t MaxVF = MaxSafeDepDistBytes / (TypeByteSize * Stride);
  LLVM_DEBUG(dbgs() << "LAA: Positive distance " << Val.getSExtValue()
                    << " with max VF = " << MaxVF << '\n');
  uint64_t MaxVFInBits = MaxVF * TypeByteSize * 8;
  MaxSafeRegisterWidth = std::min(MaxSafeRegisterWidth, MaxVFInBits);
  return Dependence::BackwardVectorizable;
}

bool MemoryDepChecker::areDepsSafe(DepCandidates &AccessSets,
                                   MemAccessInfoList &CheckDeps,
                                   const ValueToValueMap &Strides) {

  MaxSafeDepDistBytes = -1;
  SmallPtrSet<MemAccessInfo, 8> Visited;
  for (MemAccessInfo CurAccess : CheckDeps) {
    if (Visited.count(CurAccess))
      continue;

    // Get the relevant memory access set.
    EquivalenceClasses<MemAccessInfo>::iterator I =
      AccessSets.findValue(AccessSets.getLeaderValue(CurAccess));

    // Check accesses within this set.
    EquivalenceClasses<MemAccessInfo>::member_iterator AI =
        AccessSets.member_begin(I);
    EquivalenceClasses<MemAccessInfo>::member_iterator AE =
        AccessSets.member_end();

    // Check every access pair.
    while (AI != AE) {
      Visited.insert(*AI);
      bool AIIsWrite = AI->getInt();
      // Check loads only against the next members of the equivalence class,
      // but stores also against other stores in the same equivalence class -
      // to the same address.
      EquivalenceClasses<MemAccessInfo>::member_iterator OI =
          (AIIsWrite ? AI : std::next(AI));
      while (OI != AE) {
        // Check every accessing instruction pair in program order.
        for (std::vector<unsigned>::iterator I1 = Accesses[*AI].begin(),
             I1E = Accesses[*AI].end(); I1 != I1E; ++I1)
          // Scan all accesses of another equivalence class, but only the next
          // accesses of the same equivalence class.
          for (std::vector<unsigned>::iterator
                   I2 = (OI == AI ? std::next(I1) : Accesses[*OI].begin()),
                   I2E = (OI == AI ? I1E : Accesses[*OI].end());
               I2 != I2E; ++I2) {
            auto A = std::make_pair(&*AI, *I1);
            auto B = std::make_pair(&*OI, *I2);

            assert(*I1 != *I2);
            if (*I1 > *I2)
              std::swap(A, B);

            Dependence::DepType Type =
                isDependent(*A.first, A.second, *B.first, B.second, Strides);
            mergeInStatus(Dependence::isSafeForVectorization(Type));

            // Gather dependences unless we accumulated MaxDependences
            // dependences. In that case return as soon as we find the first
            // unsafe dependence. This puts a limit on this quadratic
            // algorithm.
            if (RecordDependences) {
              if (Type != Dependence::NoDep)
                Dependences.push_back(Dependence(A.second, B.second, Type));

              if (Dependences.size() >= MaxDependences) {
                RecordDependences = false;
                Dependences.clear();
                LLVM_DEBUG(dbgs()
                           << "Too many dependences, stopped recording\n");
              }
            }
            if (!RecordDependences && !isSafeForVectorization())
              return false;
          }
        ++OI;
      }
      AI++;
    }
  }

  LLVM_DEBUG(dbgs() << "Total Dependences: " << Dependences.size() << "\n");
  return isSafeForVectorization();
}

SmallVector<Instruction *, 4>
MemoryDepChecker::getInstructionsForAccess(Value *Ptr, bool isWrite) const {
  MemAccessInfo Access(Ptr, isWrite);
  auto &IndexVector = Accesses.find(Access)->second;

  SmallVector<Instruction *, 4> Insts;
  transform(IndexVector,
            std::back_inserter(Insts),
            [&](unsigned Idx) { return this->InstMap[Idx]; });
  return Insts;
}

const char *MemoryDepChecker::Dependence::DepName[] = {
    "NoDep", "Unknown", "Forward", "ForwardButPreventsForwarding", "Backward",
    "BackwardVectorizable", "BackwardVectorizableButPreventsForwarding"};

void MemoryDepChecker::Dependence::print(
    raw_ostream &OS, unsigned Depth,
    const SmallVectorImpl<Instruction *> &Instrs) const {
  OS.indent(Depth) << DepName[Type] << ":\n";
  OS.indent(Depth + 2) << *Instrs[Source] << " -> \n";
  OS.indent(Depth + 2) << *Instrs[Destination] << "\n";
}

bool LoopAccessInfo::canAnalyzeLoop() {
  // We need to have a loop header.
  LLVM_DEBUG(dbgs() << "LAA: Found a loop in "
                    << TheLoop->getHeader()->getParent()->getName() << ": "
                    << TheLoop->getHeader()->getName() << '\n');

  // We can only analyze innermost loops.
  if (!TheLoop->empty()) {
    LLVM_DEBUG(dbgs() << "LAA: loop is not the innermost loop\n");
    recordAnalysis("NotInnerMostLoop") << "loop is not the innermost loop";
    return false;
  }

  // We must have a single backedge.
  if (TheLoop->getNumBackEdges() != 1) {
    LLVM_DEBUG(
        dbgs() << "LAA: loop control flow is not understood by analyzer\n");
    recordAnalysis("CFGNotUnderstood")
        << "loop control flow is not understood by analyzer";
    return false;
  }

  // We must have a single exiting block.
  if (!TheLoop->getExitingBlock()) {
    LLVM_DEBUG(
        dbgs() << "LAA: loop control flow is not understood by analyzer\n");
    recordAnalysis("CFGNotUnderstood")
        << "loop control flow is not understood by analyzer";
    return false;
  }

  // We only handle bottom-tested loops, i.e. loops in which the condition is
  // checked at the end of each iteration. With that we can assume that all
  // instructions in the loop are executed the same number of times.
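  // (For example, a while-style loop whose exit test sits in the header, so
  // that the exiting block is not the latch, is rejected here.)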
  if (TheLoop->getExitingBlock() != TheLoop->getLoopLatch()) {
    LLVM_DEBUG(
        dbgs() << "LAA: loop control flow is not understood by analyzer\n");
    recordAnalysis("CFGNotUnderstood")
        << "loop control flow is not understood by analyzer";
    return false;
  }

  // ScalarEvolution needs to be able to find the exit count.
  const SCEV *ExitCount = PSE->getBackedgeTakenCount();
  if (ExitCount == PSE->getSE()->getCouldNotCompute()) {
    recordAnalysis("CantComputeNumberOfIterations")
        << "could not determine number of loop iterations";
    LLVM_DEBUG(dbgs() << "LAA: SCEV could not compute the loop exit count.\n");
    return false;
  }

  return true;
}

void LoopAccessInfo::analyzeLoop(AliasAnalysis *AA, LoopInfo *LI,
                                 const TargetLibraryInfo *TLI,
                                 DominatorTree *DT) {
  typedef SmallPtrSet<Value *, 16> ValueSet;

  // Holds the Load and Store instructions.
  SmallVector<LoadInst *, 16> Loads;
  SmallVector<StoreInst *, 16> Stores;

  // Holds all the different accesses in the loop.
  unsigned NumReads = 0;
  unsigned NumReadWrites = 0;

  bool HasComplexMemInst = false;

  // A runtime check is only legal to insert if there are no convergent calls.
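  // (Such a check introduces new control flow, which would give a convergent
  // call an additional control dependency.)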
  HasConvergentOp = false;

  PtrRtChecking->Pointers.clear();
  PtrRtChecking->Need = false;

  const bool IsAnnotatedParallel = TheLoop->isAnnotatedParallel();

  // For each block.
  for (BasicBlock *BB : TheLoop->blocks()) {
    // Scan the BB and collect legal loads and stores. Also detect any
    // convergent instructions.
    for (Instruction &I : *BB) {
      if (auto *Call = dyn_cast<CallBase>(&I)) {
        if (Call->isConvergent())
          HasConvergentOp = true;
      }

      // If we have found both a non-vectorizable memory instruction and a
      // convergent operation in this loop, there is no reason to continue
      // the search.
      if (HasComplexMemInst && HasConvergentOp) {
        CanVecMem = false;
        return;
      }

      // Avoid hitting recordAnalysis multiple times.
      if (HasComplexMemInst)
        continue;

      // If this is a load, save it. If this instruction can read from memory
      // but is not a load, then we quit. Notice that we don't handle function
      // calls that read or write.
      if (I.mayReadFromMemory()) {
        // Many math library functions read the rounding mode. We will only
        // vectorize a loop if it contains known function calls that don't set
        // the flag. Therefore, it is safe to ignore this read from memory.
        auto *Call = dyn_cast<CallInst>(&I);
        if (Call && getVectorIntrinsicIDForCall(Call, TLI))
          continue;

        // If the function has an explicit vectorized counterpart, we can
        // safely assume that it can be vectorized.
        if (Call && !Call->isNoBuiltin() && Call->getCalledFunction() &&
            TLI->isFunctionVectorizable(Call->getCalledFunction()->getName()))
          continue;

        auto *Ld = dyn_cast<LoadInst>(&I);
        if (!Ld) {
          recordAnalysis("CantVectorizeInstruction", &I)
              << "instruction cannot be vectorized";
          HasComplexMemInst = true;
          continue;
        }
        if (!Ld->isSimple() && !IsAnnotatedParallel) {
          recordAnalysis("NonSimpleLoad", Ld)
              << "read with atomic ordering or volatile read";
          LLVM_DEBUG(dbgs() << "LAA: Found a non-simple load.\n");
          HasComplexMemInst = true;
          continue;
        }
        NumLoads++;
        Loads.push_back(Ld);
        DepChecker->addAccess(Ld);
        if (EnableMemAccessVersioning)
          collectStridedAccess(Ld);
        continue;
      }

      // Save 'store' instructions. Abort if other instructions write to
      // memory.
      if (I.mayWriteToMemory()) {
        auto *St = dyn_cast<StoreInst>(&I);
        if (!St) {
          recordAnalysis("CantVectorizeInstruction", &I)
              << "instruction cannot be vectorized";
          HasComplexMemInst = true;
          continue;
        }
        if (!St->isSimple() && !IsAnnotatedParallel) {
          recordAnalysis("NonSimpleStore", St)
              << "write with atomic ordering or volatile write";
          LLVM_DEBUG(dbgs() << "LAA: Found a non-simple store.\n");
          HasComplexMemInst = true;
          continue;
        }
        NumStores++;
        Stores.push_back(St);
        DepChecker->addAccess(St);
        if (EnableMemAccessVersioning)
          collectStridedAccess(St);
      }
    } // Next instr.
  } // Next block.

  if (HasComplexMemInst) {
    CanVecMem = false;
    return;
  }

  // Now we have two lists that hold the loads and the stores.
  // Next, we find the pointers that they use.

  // Check if we see any stores. If there are no stores, then we don't
  // care if the pointers are *restrict*.
  if (Stores.empty()) {
    LLVM_DEBUG(dbgs() << "LAA: Found a read-only loop!\n");
    CanVecMem = true;
    return;
  }

  MemoryDepChecker::DepCandidates DependentAccesses;
  AccessAnalysis Accesses(TheLoop->getHeader()->getModule()->getDataLayout(),
                          TheLoop, AA, LI, DependentAccesses, *PSE);

  // Holds the analyzed pointers. We don't want to call GetUnderlyingObjects
  // multiple times on the same object. If the ptr is accessed twice, once
  // for read and once for write, it will only appear once (on the write
  // list). This is okay, since we are going to check for conflicts between
  // writes and between reads and writes, but not between reads and reads.
  ValueSet Seen;

  // Record uniform store addresses to identify if we have multiple stores
  // to the same address.
  ValueSet UniformStores;

  for (StoreInst *ST : Stores) {
    Value *Ptr = ST->getPointerOperand();

    if (isUniform(Ptr))
      HasDependenceInvolvingLoopInvariantAddress |=
          !UniformStores.insert(Ptr).second;

    // If we did *not* see this pointer before, insert it to the read-write
    // list. At this phase it is only a 'write' list.
    if (Seen.insert(Ptr).second) {
      ++NumReadWrites;

      MemoryLocation Loc = MemoryLocation::get(ST);
      // The TBAA metadata could have a control dependency on the predication
      // condition, so we cannot rely on it when determining whether or not we
      // need runtime pointer checks.
      if (blockNeedsPredication(ST->getParent(), TheLoop, DT))
        Loc.AATags.TBAA = nullptr;

      Accesses.addStore(Loc);
    }
  }

  if (IsAnnotatedParallel) {
    LLVM_DEBUG(
        dbgs() << "LAA: A loop annotated parallel, ignore memory dependency "
               << "checks.\n");
    CanVecMem = true;
    return;
  }

  for (LoadInst *LD : Loads) {
    Value *Ptr = LD->getPointerOperand();
    // If we did *not* see this pointer before, insert it to the
    // read list. If we *did* see it before, then it is already in
    // the read-write list. This allows us to vectorize expressions
    // such as A[i] += x, because the address of A[i] is a read-write
    // pointer. This only works if the index of A[i] is consecutive.
    // If the index is unknown (for example A[B[i]]) then we may
    // read a few words, modify, and write a few words, and some of the
    // words may be written to the same address.
    bool IsReadOnlyPtr = false;
    if (Seen.insert(Ptr).second ||
        !getPtrStride(*PSE, Ptr, TheLoop, SymbolicStrides)) {
      ++NumReads;
      IsReadOnlyPtr = true;
    }

    // See if there is an unsafe dependency between a load from a uniform
    // address and a store to the same uniform address.
    if (UniformStores.count(Ptr)) {
      LLVM_DEBUG(dbgs() << "LAA: Found an unsafe dependency between a uniform "
                           "load and uniform store to the same address!\n");
      HasDependenceInvolvingLoopInvariantAddress = true;
    }

    MemoryLocation Loc = MemoryLocation::get(LD);
    // The TBAA metadata could have a control dependency on the predication
    // condition, so we cannot rely on it when determining whether or not we
    // need runtime pointer checks.
    if (blockNeedsPredication(LD->getParent(), TheLoop, DT))
      Loc.AATags.TBAA = nullptr;

    Accesses.addLoad(Loc, IsReadOnlyPtr);
  }

  // If we write (or read-write) to a single destination and there are no
  // other reads in this loop then it is safe to vectorize.
  if (NumReadWrites == 1 && NumReads == 0) {
    LLVM_DEBUG(dbgs() << "LAA: Found a write-only loop!\n");
    CanVecMem = true;
    return;
  }

  // Build dependence sets and check whether we need a runtime pointer bounds
  // check.
  Accesses.buildDependenceSets();

  // Find pointers with computable bounds. We are going to use this information
  // to place a runtime bound check.
  bool CanDoRTIfNeeded = Accesses.canCheckPtrAtRT(*PtrRtChecking, PSE->getSE(),
                                                  TheLoop, SymbolicStrides);
  if (!CanDoRTIfNeeded) {
    recordAnalysis("CantIdentifyArrayBounds") << "cannot identify array bounds";
    LLVM_DEBUG(dbgs() << "LAA: We can't vectorize because we can't find "
                      << "the array bounds.\n");
    CanVecMem = false;
    return;
  }

  LLVM_DEBUG(
      dbgs() << "LAA: May be able to perform a memory runtime check if needed.\n");

  CanVecMem = true;
  if (Accesses.isDependencyCheckNeeded()) {
    LLVM_DEBUG(dbgs() << "LAA: Checking memory dependencies\n");
    CanVecMem = DepChecker->areDepsSafe(
        DependentAccesses, Accesses.getDependenciesToCheck(), SymbolicStrides);
    MaxSafeDepDistBytes = DepChecker->getMaxSafeDepDistBytes();

    if (!CanVecMem && DepChecker->shouldRetryWithRuntimeCheck()) {
      LLVM_DEBUG(dbgs() << "LAA: Retrying with memory checks\n");

      // Clear the dependency checks. We assume they are not needed.
      Accesses.resetDepChecks(*DepChecker);

      PtrRtChecking->reset();
      PtrRtChecking->Need = true;

      auto *SE = PSE->getSE();
      CanDoRTIfNeeded = Accesses.canCheckPtrAtRT(*PtrRtChecking, SE, TheLoop,
                                                 SymbolicStrides, true);

      // Check that we found the bounds for the pointer.
      if (!CanDoRTIfNeeded) {
        recordAnalysis("CantCheckMemDepsAtRunTime")
            << "cannot check memory dependencies at runtime";
        LLVM_DEBUG(dbgs() << "LAA: Can't vectorize with memory checks\n");
        CanVecMem = false;
        return;
      }

      CanVecMem = true;
    }
  }

  if (HasConvergentOp) {
    recordAnalysis("CantInsertRuntimeCheckWithConvergent")
        << "cannot add control dependency to convergent operation";
    LLVM_DEBUG(dbgs() << "LAA: We can't vectorize because a runtime check "
                         "would be needed with a convergent operation\n");
    CanVecMem = false;
    return;
  }

  if (CanVecMem)
    LLVM_DEBUG(
        dbgs() << "LAA: No unsafe dependent memory operations in loop. We"
               << (PtrRtChecking->Need ? "" : " don't")
               << " need runtime memory checks.\n");
  else {
    recordAnalysis("UnsafeMemDep")
        << "unsafe dependent memory operations in loop. Use "
           "#pragma loop distribute(enable) to allow loop distribution "
           "to attempt to isolate the offending operations into a separate "
           "loop";
    LLVM_DEBUG(dbgs() << "LAA: unsafe dependent memory operations in loop\n");
  }
}

bool LoopAccessInfo::blockNeedsPredication(BasicBlock *BB, Loop *TheLoop,
                                           DominatorTree *DT) {
  assert(TheLoop->contains(BB) && "Unknown block used");

  // Blocks that do not dominate the latch need predication.
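  // (For instance, the "then" block of an "if" inside the loop body executes
  // on only some iterations, so its memory accesses must be predicated.)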
  BasicBlock *Latch = TheLoop->getLoopLatch();
  return !DT->dominates(BB, Latch);
}

OptimizationRemarkAnalysis &LoopAccessInfo::recordAnalysis(StringRef RemarkName,
                                                           Instruction *I) {
  assert(!Report && "Multiple reports generated");

  Value *CodeRegion = TheLoop->getHeader();
  DebugLoc DL = TheLoop->getStartLoc();

  if (I) {
    CodeRegion = I->getParent();
    // If there is no debug location attached to the instruction, fall back to
    // using the loop's.
    if (I->getDebugLoc())
      DL = I->getDebugLoc();
  }

  Report = std::make_unique<OptimizationRemarkAnalysis>(DEBUG_TYPE, RemarkName,
                                                        DL, CodeRegion);
  return *Report;
}

bool LoopAccessInfo::isUniform(Value *V) const {
  auto *SE = PSE->getSE();
  // Since we rely on SCEV for uniformity, if the type is not SCEVable, it is
  // never considered uniform.
  // TODO: Is this really what we want? Even without FP SCEV, we may want some
  // trivially loop-invariant FP values to be considered uniform.
  if (!SE->isSCEVable(V->getType()))
    return false;
  return (SE->isLoopInvariant(SE->getSCEV(V), TheLoop));
}

// FIXME: this function is currently a duplicate of the one in
// LoopVectorize.cpp.
static Instruction *getFirstInst(Instruction *FirstInst, Value *V,
                                 Instruction *Loc) {
  if (FirstInst)
    return FirstInst;
  if (Instruction *I = dyn_cast<Instruction>(V))
    return I->getParent() == Loc->getParent() ? I : nullptr;
  return nullptr;
}

namespace {

/// IR Values for the lower and upper bounds of a pointer evolution. We
/// need to use value-handles because SCEV expansion can invalidate previously
/// expanded values. Thus expansion of a pointer can invalidate the bounds for
/// a previous one.
struct PointerBounds {
  TrackingVH<Value> Start;
  TrackingVH<Value> End;
};

} // end anonymous namespace

/// Expand code for the lower and upper bound of the pointer group \p CG
/// in \p TheLoop. \return the values for the bounds.
static PointerBounds
expandBounds(const RuntimePointerChecking::CheckingPtrGroup *CG, Loop *TheLoop,
             Instruction *Loc, SCEVExpander &Exp, ScalarEvolution *SE,
             const RuntimePointerChecking &PtrRtChecking) {
  Value *Ptr = PtrRtChecking.Pointers[CG->Members[0]].PointerValue;
  const SCEV *Sc = SE->getSCEV(Ptr);

  unsigned AS = Ptr->getType()->getPointerAddressSpace();
  LLVMContext &Ctx = Loc->getContext();

  // Use this type for pointer arithmetic.
  Type *PtrArithTy = Type::getInt8PtrTy(Ctx, AS);

  if (SE->isLoopInvariant(Sc, TheLoop)) {
    LLVM_DEBUG(dbgs() << "LAA: Adding RT check for a loop invariant ptr:"
                      << *Ptr << "\n");
    // Ptr could be in the loop body. If so, expand a new one at the correct
    // location.
    Instruction *Inst = dyn_cast<Instruction>(Ptr);
    Value *NewPtr = (Inst && TheLoop->contains(Inst))
                        ? Exp.expandCodeFor(Sc, PtrArithTy, Loc)
                        : Ptr;
    // We must return a half-open range, which means incrementing Sc.
    const SCEV *ScPlusOne = SE->getAddExpr(Sc, SE->getOne(PtrArithTy));
    Value *NewPtrPlusOne = Exp.expandCodeFor(ScPlusOne, PtrArithTy, Loc);
    return {NewPtr, NewPtrPlusOne};
  } else {
    Value *Start = nullptr, *End = nullptr;
    LLVM_DEBUG(dbgs() << "LAA: Adding RT check for range:\n");
    Start = Exp.expandCodeFor(CG->Low, PtrArithTy, Loc);
    End = Exp.expandCodeFor(CG->High, PtrArithTy, Loc);
    LLVM_DEBUG(dbgs() << "Start: " << *CG->Low << " End: " << *CG->High
                      << "\n");
    return {Start, End};
  }
}

/// Turns a collection of checks into a collection of expanded upper and
/// lower bounds for both pointers in the check.
static SmallVector<std::pair<PointerBounds, PointerBounds>, 4> expandBounds(
    const SmallVectorImpl<RuntimePointerChecking::PointerCheck> &PointerChecks,
    Loop *L, Instruction *Loc, ScalarEvolution *SE, SCEVExpander &Exp,
    const RuntimePointerChecking &PtrRtChecking) {
  SmallVector<std::pair<PointerBounds, PointerBounds>, 4> ChecksWithBounds;

  // Here we're relying on the SCEV Expander's cache to only emit code for the
  // same bounds once.
  transform(
      PointerChecks, std::back_inserter(ChecksWithBounds),
      [&](const RuntimePointerChecking::PointerCheck &Check) {
        PointerBounds First = expandBounds(Check.first, L, Loc, Exp, SE,
                                           PtrRtChecking),
                      Second = expandBounds(Check.second, L, Loc, Exp, SE,
                                            PtrRtChecking);
        return std::make_pair(First, Second);
      });

  return ChecksWithBounds;
}

std::pair<Instruction *, Instruction *> LoopAccessInfo::addRuntimeChecks(
    Instruction *Loc,
    const SmallVectorImpl<RuntimePointerChecking::PointerCheck> &PointerChecks)
    const {
  const DataLayout &DL = TheLoop->getHeader()->getModule()->getDataLayout();
  auto *SE = PSE->getSE();
  SCEVExpander Exp(*SE, DL, "induction");
  auto ExpandedChecks =
      expandBounds(PointerChecks, TheLoop, Loc, SE, Exp, *PtrRtChecking);

  LLVMContext &Ctx = Loc->getContext();
  Instruction *FirstInst = nullptr;
  IRBuilder<> ChkBuilder(Loc);
  // Our instructions might fold to a constant.
  Value *MemoryRuntimeCheck = nullptr;

  for (const auto &Check : ExpandedChecks) {
    const PointerBounds &A = Check.first, &B = Check.second;
    // Check if two pointers (A and B) conflict, where a conflict means the
    // half-open access ranges overlap:
    // start(A) < end(B) && start(B) < end(A)
    unsigned AS0 = A.Start->getType()->getPointerAddressSpace();
    unsigned AS1 = B.Start->getType()->getPointerAddressSpace();

    assert((AS0 == B.End->getType()->getPointerAddressSpace()) &&
           (AS1 == A.End->getType()->getPointerAddressSpace()) &&
           "Trying to bounds check pointers with different address spaces");

    Type *PtrArithTy0 = Type::getInt8PtrTy(Ctx, AS0);
    Type *PtrArithTy1 = Type::getInt8PtrTy(Ctx, AS1);

    Value *Start0 = ChkBuilder.CreateBitCast(A.Start, PtrArithTy0, "bc");
    Value *Start1 = ChkBuilder.CreateBitCast(B.Start, PtrArithTy1, "bc");
    Value *End0 = ChkBuilder.CreateBitCast(A.End, PtrArithTy1, "bc");
    Value *End1 = ChkBuilder.CreateBitCast(B.End, PtrArithTy0, "bc");

    // [A|B].Start points to the first accessed byte under base [A|B].
    // [A|B].End points to the last accessed byte, plus one.
    // There is no conflict when the intervals are disjoint:
    // NoConflict = (B.Start >= A.End) || (A.Start >= B.End)
    //
    // bound0 = (B.Start < A.End)
    // bound1 = (A.Start < B.End)
    // IsConflict = bound0 & bound1
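    // For example (hypothetical addresses), with A = [0x10, 0x20) and
    // B = [0x20, 0x30): bound0 = (0x20 < 0x20) is false, so IsConflict is
    // false and the two half-open ranges are proven disjoint.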
| 2246 | Value *Cmp0 = ChkBuilder.CreateICmpULT(Start0, End1, "bound0"); |
Adam Nemet | 1da7df3 | 2015-07-26 05:32:14 +0000 | [diff] [blame] | 2247 | FirstInst = getFirstInst(FirstInst, Cmp0, Loc); |
Elena Demikhovsky | 3622fbf | 2016-08-28 08:53:53 +0000 | [diff] [blame] | 2248 | Value *Cmp1 = ChkBuilder.CreateICmpULT(Start1, End0, "bound1"); |
Adam Nemet | 1da7df3 | 2015-07-26 05:32:14 +0000 | [diff] [blame] | 2249 | FirstInst = getFirstInst(FirstInst, Cmp1, Loc); |
| 2250 | Value *IsConflict = ChkBuilder.CreateAnd(Cmp0, Cmp1, "found.conflict"); |
| 2251 | FirstInst = getFirstInst(FirstInst, IsConflict, Loc); |
| 2252 | if (MemoryRuntimeCheck) { |
| 2253 | IsConflict = |
| 2254 | ChkBuilder.CreateOr(MemoryRuntimeCheck, IsConflict, "conflict.rdx"); |
Adam Nemet | 7206d7a | 2015-02-06 18:31:04 +0000 | [diff] [blame] | 2255 | FirstInst = getFirstInst(FirstInst, IsConflict, Loc); |
Adam Nemet | 7206d7a | 2015-02-06 18:31:04 +0000 | [diff] [blame] | 2256 | } |
Adam Nemet | 1da7df3 | 2015-07-26 05:32:14 +0000 | [diff] [blame] | 2257 | MemoryRuntimeCheck = IsConflict; |
Adam Nemet | 7206d7a | 2015-02-06 18:31:04 +0000 | [diff] [blame] | 2258 | } |
| 2259 | |
Adam Nemet | 90fec84 | 2015-04-02 17:51:57 +0000 | [diff] [blame] | 2260 | if (!MemoryRuntimeCheck) |
| 2261 | return std::make_pair(nullptr, nullptr); |

  // We have to do this trickery because the IRBuilder might fold the check to
  // a constant expression in which case there is no Instruction anchored in
  // the block. Creating the 'and' via BinaryOperator::CreateAnd (rather than
  // through the folding IRBuilder) guarantees that a real Instruction exists.
  Instruction *Check = BinaryOperator::CreateAnd(MemoryRuntimeCheck,
                                                 ConstantInt::getTrue(Ctx));
  ChkBuilder.Insert(Check, "memcheck.conflict");
  FirstInst = getFirstInst(FirstInst, Check, Loc);
  return std::make_pair(FirstInst, Check);
}

std::pair<Instruction *, Instruction *>
LoopAccessInfo::addRuntimeChecks(Instruction *Loc) const {
  if (!PtrRtChecking->Need)
    return std::make_pair(nullptr, nullptr);

  return addRuntimeChecks(Loc, PtrRtChecking->getChecks());
}
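
// A minimal usage sketch (assuming LoopAccessLegacyAnalysis results LAA for a
// loop L that has a preheader):
//   const LoopAccessInfo &LAI = LAA.getInfo(L);
//   Instruction *Loc = L->getLoopPreheader()->getTerminator();
//   auto Checks = LAI.addRuntimeChecks(Loc);
//   // Checks.second is the "memcheck.conflict" value; branch on it to choose
//   // between the checked (vectorized) and the original version of the loop.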
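/// If \p MemAccess is a load or store whose pointer is governed by a symbolic
/// stride (e.g. A[i * Stride] with a loop-invariant Stride), record the stride
/// so that the loop can later be versioned under a "Stride == 1" predicate.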
void LoopAccessInfo::collectStridedAccess(Value *MemAccess) {
  Value *Ptr = nullptr;
  if (LoadInst *LI = dyn_cast<LoadInst>(MemAccess))
    Ptr = LI->getPointerOperand();
  else if (StoreInst *SI = dyn_cast<StoreInst>(MemAccess))
    Ptr = SI->getPointerOperand();
  else
    return;

  Value *Stride = getStrideFromPointer(Ptr, PSE->getSE(), TheLoop);
  if (!Stride)
    return;

  LLVM_DEBUG(dbgs() << "LAA: Found a strided access that is a candidate for "
                       "versioning:");
  LLVM_DEBUG(dbgs() << "  Ptr: " << *Ptr << " Stride: " << *Stride << "\n");

  // Avoid adding the "Stride == 1" predicate when we know that
  // Stride >= Trip-Count. Under such a predicate the loop would execute at
  // most a single iteration, since Trip-Count <= Stride == 1.
  //
  // TODO: We are currently not making a very informed decision on when it is
  // beneficial to apply stride versioning. It might make more sense for the
  // users of this analysis (such as the vectorizer) to trigger it, based on
  // their specific cost considerations; for example, in cases where stride
  // versioning does not help resolving memory accesses/dependences, the
  // vectorizer should evaluate the cost of the runtime test, and the benefit
  // of various possible stride specializations, considering the alternatives
  // of using gather/scatters (if available).

  const SCEV *StrideExpr = PSE->getSCEV(Stride);
  const SCEV *BETakenCount = PSE->getBackedgeTakenCount();

  // Match the types so we can compare the stride and the BETakenCount.
  // The Stride can be positive/negative, so we sign extend Stride;
  // The backedgeTakenCount is non-negative, so we zero extend BETakenCount.
  const DataLayout &DL = TheLoop->getHeader()->getModule()->getDataLayout();
  uint64_t StrideTypeSize = DL.getTypeAllocSize(StrideExpr->getType());
  uint64_t BETypeSize = DL.getTypeAllocSize(BETakenCount->getType());
  const SCEV *CastedStride = StrideExpr;
  const SCEV *CastedBECount = BETakenCount;
  ScalarEvolution *SE = PSE->getSE();
  if (BETypeSize >= StrideTypeSize)
    CastedStride = SE->getNoopOrSignExtend(StrideExpr, BETakenCount->getType());
  else
    CastedBECount = SE->getZeroExtendExpr(BETakenCount, StrideExpr->getType());
  const SCEV *StrideMinusBETaken =
      SE->getMinusSCEV(CastedStride, CastedBECount);
  // Since TripCount == BackEdgeTakenCount + 1, checking:
  // "Stride >= TripCount" is equivalent to checking:
  // Stride - BETakenCount > 0
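  // (Stride >= TripCount = BETakenCount + 1 rearranges to
  // Stride - BETakenCount >= 1, which over the integers is exactly
  // Stride - BETakenCount > 0.)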
  if (SE->isKnownPositive(StrideMinusBETaken)) {
    LLVM_DEBUG(
        dbgs() << "LAA: Stride>=TripCount; No point in versioning as the "
                  "Stride==1 predicate will imply that the loop executes "
                  "at most once.\n");
    return;
  }
  LLVM_DEBUG(dbgs() << "LAA: Found a strided access that we can version.\n");

  SymbolicStrides[Ptr] = Stride;
  StrideSet.insert(Stride);
}

LoopAccessInfo::LoopAccessInfo(Loop *L, ScalarEvolution *SE,
                               const TargetLibraryInfo *TLI, AliasAnalysis *AA,
                               DominatorTree *DT, LoopInfo *LI)
    : PSE(std::make_unique<PredicatedScalarEvolution>(*SE, *L)),
      PtrRtChecking(std::make_unique<RuntimePointerChecking>(SE)),
      DepChecker(std::make_unique<MemoryDepChecker>(*PSE, L)), TheLoop(L),
      NumLoads(0), NumStores(0), MaxSafeDepDistBytes(-1), CanVecMem(false),
      HasConvergentOp(false),
      HasDependenceInvolvingLoopInvariantAddress(false) {
  if (canAnalyzeLoop())
    analyzeLoop(AA, LI, TLI, DT);
}

void LoopAccessInfo::print(raw_ostream &OS, unsigned Depth) const {
  if (CanVecMem) {
    OS.indent(Depth) << "Memory dependences are safe";
    if (MaxSafeDepDistBytes != -1ULL)
      OS << " with a maximum dependence distance of " << MaxSafeDepDistBytes
         << " bytes";
    if (PtrRtChecking->Need)
      OS << " with run-time checks";
    OS << "\n";
  }

  if (HasConvergentOp)
    OS.indent(Depth) << "Has convergent operation in loop\n";

  if (Report)
    OS.indent(Depth) << "Report: " << Report->getMsg() << "\n";

  if (auto *Dependences = DepChecker->getDependences()) {
    OS.indent(Depth) << "Dependences:\n";
    for (auto &Dep : *Dependences) {
      Dep.print(OS, Depth + 2, DepChecker->getMemoryInstructions());
      OS << "\n";
    }
  } else
    OS.indent(Depth) << "Too many dependences, not recorded\n";

  // List the pairs of accesses that need run-time checks to prove
  // independence.
  PtrRtChecking->print(OS, Depth);
  OS << "\n";

  OS.indent(Depth) << "Non vectorizable stores to invariant address were "
                   << (HasDependenceInvolvingLoopInvariantAddress ? "" : "not ")
                   << "found in loop.\n";

  OS.indent(Depth) << "SCEV assumptions:\n";
  PSE->getUnionPredicate().print(OS, Depth);

  OS << "\n";

  OS.indent(Depth) << "Expressions re-written:\n";
  PSE->print(OS, Depth);
}

const LoopAccessInfo &LoopAccessLegacyAnalysis::getInfo(Loop *L) {
  auto &LAI = LoopAccessInfoMap[L];

  if (!LAI)
    LAI = std::make_unique<LoopAccessInfo>(L, SE, TLI, AA, DT, LI);

  return *LAI.get();
}

void LoopAccessLegacyAnalysis::print(raw_ostream &OS, const Module *M) const {
  LoopAccessLegacyAnalysis &LAA = *const_cast<LoopAccessLegacyAnalysis *>(this);

  for (Loop *TopLevelLoop : *LI)
    for (Loop *L : depth_first(TopLevelLoop)) {
      OS.indent(2) << L->getHeader()->getName() << ":\n";
      auto &LAI = LAA.getInfo(L);
      LAI.print(OS, 4);
    }
}

bool LoopAccessLegacyAnalysis::runOnFunction(Function &F) {
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  auto *TLIP = getAnalysisIfAvailable<TargetLibraryInfoWrapperPass>();
  TLI = TLIP ? &TLIP->getTLI(F) : nullptr;
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();

  return false;
}

void LoopAccessLegacyAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ScalarEvolutionWrapperPass>();
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();

  AU.setPreservesAll();
}

char LoopAccessLegacyAnalysis::ID = 0;
static const char laa_name[] = "Loop Access Analysis";
#define LAA_NAME "loop-accesses"

INITIALIZE_PASS_BEGIN(LoopAccessLegacyAnalysis, LAA_NAME, laa_name, false, true)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_END(LoopAccessLegacyAnalysis, LAA_NAME, laa_name, false, true)

AnalysisKey LoopAccessAnalysis::Key;

LoopAccessInfo LoopAccessAnalysis::run(Loop &L, LoopAnalysisManager &AM,
                                       LoopStandardAnalysisResults &AR) {
  return LoopAccessInfo(&L, &AR.SE, &AR.TLI, &AR.AA, &AR.DT, &AR.LI);
}

namespace llvm {

Pass *createLAAPass() {
  return new LoopAccessLegacyAnalysis();
}

} // end namespace llvm