//===-------- LoopDataPrefetch.cpp - Loop Data Prefetching Pass -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a Loop Data Prefetching Pass.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/LoopDataPrefetch.h"

#define DEBUG_TYPE "loop-data-prefetch"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/OptimizationDiagnosticInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionAliasAnalysis.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ValueMapper.h"
using namespace llvm;

// By default, we only prefetch the addresses read by loads; this option also
// enables prefetching of the addresses written by stores.
static cl::opt<bool>
PrefetchWrites("loop-prefetch-writes", cl::Hidden, cl::init(false),
               cl::desc("Prefetch write addresses"));

static cl::opt<unsigned>
    PrefetchDistance("prefetch-distance",
                     cl::desc("Number of instructions to prefetch ahead"),
                     cl::Hidden);

static cl::opt<unsigned>
    MinPrefetchStride("min-prefetch-stride",
                      cl::desc("Min stride to add prefetches"), cl::Hidden);

static cl::opt<unsigned> MaxPrefetchIterationsAhead(
    "max-prefetch-iters-ahead",
    cl::desc("Max number of iterations to prefetch ahead"), cl::Hidden);

STATISTIC(NumPrefetches, "Number of prefetches inserted");

namespace {

/// Loop prefetch implementation class.
class LoopDataPrefetch {
public:
  LoopDataPrefetch(AssumptionCache *AC, LoopInfo *LI, ScalarEvolution *SE,
                   const TargetTransformInfo *TTI,
                   OptimizationRemarkEmitter *ORE)
      : AC(AC), LI(LI), SE(SE), TTI(TTI), ORE(ORE) {}

  bool run();

private:
  bool runOnLoop(Loop *L);

  /// \brief Check if the stride of the accesses is large enough to warrant a
  /// prefetch.
  bool isStrideLargeEnough(const SCEVAddRecExpr *AR);

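  // Each of the accessors below prefers a value given explicitly on the
  // command line and otherwise falls back to the target's
  // TargetTransformInfo default.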
  unsigned getMinPrefetchStride() {
    if (MinPrefetchStride.getNumOccurrences() > 0)
      return MinPrefetchStride;
    return TTI->getMinPrefetchStride();
  }

  unsigned getPrefetchDistance() {
    if (PrefetchDistance.getNumOccurrences() > 0)
      return PrefetchDistance;
    return TTI->getPrefetchDistance();
  }

  unsigned getMaxPrefetchIterationsAhead() {
    if (MaxPrefetchIterationsAhead.getNumOccurrences() > 0)
      return MaxPrefetchIterationsAhead;
    return TTI->getMaxPrefetchIterationsAhead();
  }

  AssumptionCache *AC;
  LoopInfo *LI;
  ScalarEvolution *SE;
  const TargetTransformInfo *TTI;
  OptimizationRemarkEmitter *ORE;
};

/// Legacy class for inserting loop data prefetches.
class LoopDataPrefetchLegacyPass : public FunctionPass {
public:
  static char ID; // Pass ID, replacement for typeid
  LoopDataPrefetchLegacyPass() : FunctionPass(ID) {
    initializeLoopDataPrefetchLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addPreserved<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
    AU.addPreserved<LoopInfoWrapperPass>();
    AU.addRequired<OptimizationRemarkEmitterWrapperPass>();
    AU.addRequired<ScalarEvolutionWrapperPass>();
    // FIXME: For some reason, preserving SE here breaks LSR (even if
    // this pass changes nothing).
    // AU.addPreserved<ScalarEvolutionWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
  }

  bool runOnFunction(Function &F) override;
};
}

char LoopDataPrefetchLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(LoopDataPrefetchLegacyPass, "loop-data-prefetch",
                      "Loop Data Prefetch", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(OptimizationRemarkEmitterWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass)
INITIALIZE_PASS_END(LoopDataPrefetchLegacyPass, "loop-data-prefetch",
                    "Loop Data Prefetch", false, false)

FunctionPass *llvm::createLoopDataPrefetchPass() {
  return new LoopDataPrefetchLegacyPass();
}

bool LoopDataPrefetch::isStrideLargeEnough(const SCEVAddRecExpr *AR) {
  unsigned TargetMinStride = getMinPrefetchStride();
  // If any stride is acceptable, there is nothing to check.
  if (TargetMinStride <= 1)
    return true;

  const auto *ConstStride = dyn_cast<SCEVConstant>(AR->getStepRecurrence(*SE));
  // If a minimum stride is required, don't prefetch unless we can prove that
  // the stride is large enough, which requires a constant stride.
  if (!ConstStride)
    return false;

  unsigned AbsStride = std::abs(ConstStride->getAPInt().getSExtValue());
  return TargetMinStride <= AbsStride;
}

PreservedAnalyses LoopDataPrefetchPass::run(Function &F,
                                            FunctionAnalysisManager &AM) {
  LoopInfo *LI = &AM.getResult<LoopAnalysis>(F);
  ScalarEvolution *SE = &AM.getResult<ScalarEvolutionAnalysis>(F);
  AssumptionCache *AC = &AM.getResult<AssumptionAnalysis>(F);
  OptimizationRemarkEmitter *ORE =
      &AM.getResult<OptimizationRemarkEmitterAnalysis>(F);
  const TargetTransformInfo *TTI = &AM.getResult<TargetIRAnalysis>(F);

  LoopDataPrefetch LDP(AC, LI, SE, TTI, ORE);
  bool Changed = LDP.run();

  if (Changed) {
    PreservedAnalyses PA;
    PA.preserve<DominatorTreeAnalysis>();
    PA.preserve<LoopAnalysis>();
    return PA;
  }

  return PreservedAnalyses::all();
}

bool LoopDataPrefetchLegacyPass::runOnFunction(Function &F) {
  if (skipFunction(F))
    return false;

  LoopInfo *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  ScalarEvolution *SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  AssumptionCache *AC =
      &getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
  OptimizationRemarkEmitter *ORE =
      &getAnalysis<OptimizationRemarkEmitterWrapperPass>().getORE();
  const TargetTransformInfo *TTI =
      &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);

  LoopDataPrefetch LDP(AC, LI, SE, TTI, ORE);
  return LDP.run();
}

bool LoopDataPrefetch::run() {
  // If PrefetchDistance is not set, don't run the pass. This gives an
  // opportunity for targets to run this pass for selected subtargets only
  // (whose TTI sets PrefetchDistance).
  if (getPrefetchDistance() == 0)
    return false;
  assert(TTI->getCacheLineSize() && "Cache line size is not set for target");

  bool MadeChange = false;

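  // Walk every loop in each top-level loop nest; runOnLoop itself only
  // transforms inner-most loops.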
  for (Loop *I : *LI)
    for (auto L = df_begin(I), LE = df_end(I); L != LE; ++L)
      MadeChange |= runOnLoop(*L);

  return MadeChange;
}

bool LoopDataPrefetch::runOnLoop(Loop *L) {
  bool MadeChange = false;

  // Only prefetch in the inner-most loop.
  if (!L->empty())
    return MadeChange;

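  // Ephemeral values (those only feeding llvm.assume and similar) are
  // collected so they do not count toward the loop-size estimate below.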
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(L, AC, EphValues);

  // Calculate the number of iterations ahead to prefetch.
  CodeMetrics Metrics;
  for (Loop::block_iterator I = L->block_begin(), IE = L->block_end();
       I != IE; ++I) {

    // If the loop already has prefetches, then assume that the user knows
    // what they are doing and don't add any more.
    for (BasicBlock::iterator J = (*I)->begin(), JE = (*I)->end();
         J != JE; ++J)
      if (CallInst *CI = dyn_cast<CallInst>(J))
        if (Function *F = CI->getCalledFunction())
          if (F->getIntrinsicID() == Intrinsic::prefetch)
            return MadeChange;

    Metrics.analyzeBasicBlock(*I, *TTI, EphValues);
  }
  unsigned LoopSize = Metrics.NumInsts;
  if (!LoopSize)
    LoopSize = 1;

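  // The prefetch distance is measured in instructions, so dividing it by the
  // loop-body size gives the number of iterations to look ahead.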
  unsigned ItersAhead = getPrefetchDistance() / LoopSize;
  if (!ItersAhead)
    ItersAhead = 1;

  if (ItersAhead > getMaxPrefetchIterationsAhead())
    return MadeChange;

  DEBUG(dbgs() << "Prefetching " << ItersAhead
               << " iterations ahead (loop size: " << LoopSize << ") in "
               << L->getHeader()->getParent()->getName() << ": " << *L);

  SmallVector<std::pair<Instruction *, const SCEVAddRecExpr *>, 16> PrefLoads;
  for (Loop::block_iterator I = L->block_begin(), IE = L->block_end();
       I != IE; ++I) {
    for (BasicBlock::iterator J = (*I)->begin(), JE = (*I)->end();
         J != JE; ++J) {
      Value *PtrValue;
      Instruction *MemI;

      if (LoadInst *LMemI = dyn_cast<LoadInst>(J)) {
        MemI = LMemI;
        PtrValue = LMemI->getPointerOperand();
      } else if (StoreInst *SMemI = dyn_cast<StoreInst>(J)) {
        if (!PrefetchWrites) continue;
        MemI = SMemI;
        PtrValue = SMemI->getPointerOperand();
      } else continue;

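      // Only consider pointers in the default (zero) address space.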
      unsigned PtrAddrSpace = PtrValue->getType()->getPointerAddressSpace();
      if (PtrAddrSpace)
        continue;

      if (L->isLoopInvariant(PtrValue))
        continue;

      const SCEV *LSCEV = SE->getSCEV(PtrValue);
      const SCEVAddRecExpr *LSCEVAddRec = dyn_cast<SCEVAddRecExpr>(LSCEV);
      if (!LSCEVAddRec)
        continue;

      // Check if the stride of the accesses is large enough to warrant a
      // prefetch.
      if (!isStrideLargeEnough(LSCEVAddRec))
        continue;

      // We don't want to double prefetch individual cache lines. If this load
      // is known to be within one cache line of some other load that has
      // already been prefetched, then don't prefetch this one as well.
      bool DupPref = false;
      for (const auto &PrefLoad : PrefLoads) {
        const SCEV *PtrDiff = SE->getMinusSCEV(LSCEVAddRec, PrefLoad.second);
        if (const SCEVConstant *ConstPtrDiff =
                dyn_cast<SCEVConstant>(PtrDiff)) {
          int64_t PD = std::abs(ConstPtrDiff->getValue()->getSExtValue());
          if (PD < (int64_t) TTI->getCacheLineSize()) {
            DupPref = true;
            break;
          }
        }
      }
      if (DupPref)
        continue;

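      // Form the address ItersAhead iterations further along the recurrence:
      // LSCEVAddRec + ItersAhead * Step.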
      const SCEV *NextLSCEV = SE->getAddExpr(LSCEVAddRec, SE->getMulExpr(
        SE->getConstant(LSCEVAddRec->getType(), ItersAhead),
        LSCEVAddRec->getStepRecurrence(*SE)));
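      // Skip the access if SCEVExpander cannot safely materialize this
      // expression at the insertion point.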
      if (!isSafeToExpand(NextLSCEV, *SE))
        continue;

      PrefLoads.push_back(std::make_pair(MemI, LSCEVAddRec));

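      // Materialize the prefetch address as an i8* immediately before the
      // memory instruction.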
      Type *I8Ptr = Type::getInt8PtrTy((*I)->getContext(), PtrAddrSpace);
      SCEVExpander SCEVE(*SE, J->getModule()->getDataLayout(), "prefaddr");
      Value *PrefPtrValue = SCEVE.expandCodeFor(NextLSCEV, I8Ptr, MemI);

      IRBuilder<> Builder(MemI);
      Module *M = (*I)->getParent()->getParent();
      Type *I32 = Type::getInt32Ty((*I)->getContext());
      Value *PrefetchFunc = Intrinsic::getDeclaration(M, Intrinsic::prefetch);
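      // llvm.prefetch(addr, rw, locality, cache type): rw is 0 for a read and
      // 1 for a write, locality 3 requests maximal temporal locality, and the
      // trailing 1 selects the data cache.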
      Builder.CreateCall(
          PrefetchFunc,
          {PrefPtrValue,
           ConstantInt::get(I32, MemI->mayReadFromMemory() ? 0 : 1),
           ConstantInt::get(I32, 3), ConstantInt::get(I32, 1)});
      ++NumPrefetches;
      DEBUG(dbgs() << "  Access: " << *PtrValue << ", SCEV: " << *LSCEV
                   << "\n");
      ORE->emitOptimizationRemark(DEBUG_TYPE, MemI, "prefetched memory access");

      MadeChange = true;
    }
  }

  return MadeChange;
}