//===-- LoopIdiomRecognize.cpp - Loop idiom recognition -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements an idiom recognizer that transforms simple loops into a
// non-loop form. In cases where this kicks in, it can be a significant
// performance win.
//
//===----------------------------------------------------------------------===//
//
// TODO List:
//
// Future loop memory idioms to recognize:
//   memcmp, strlen, etc.
// Future floating point idioms to recognize in -ffast-math mode:
//   fpowi
// Future integer operation idioms to recognize:
//   ctpop, ctlz, cttz
//
// Beware that isel's default lowering for ctpop is highly inefficient for
// i64 and larger types when i64 is legal and the value has few bits set. It
// would be good to enhance isel to emit a loop for ctpop in this case.
//
// We should enhance the memset/memcpy recognition to handle multiple stores in
// the loop. This would handle things like:
//   void foo(_Complex float *P)
//     for (i) { __real__(*P) = 0; __imag__(*P) = 0; }
//
// We should enhance this to handle negative strides through memory.
// Alternatively (and perhaps better) we could rely on an earlier pass to force
// forward iteration through memory, which is generally better for cache
// behavior. Negative strides *do* happen for memset/memcpy loops.
//
// This could recognize common matrix multiplies and dot product idioms and
// replace them with calls to BLAS (if linked in??).
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "loop-idiom"
#include "llvm/Transforms/Scalar.h"
#include "llvm/IRBuilder.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/DependenceAnalysis.h"
#include "llvm/Analysis/LoopPass.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/DataLayout.h"
#include "llvm/Target/TargetLibraryInfo.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

STATISTIC(NumMemSet, "Number of memsets formed from loop stores");
STATISTIC(NumMemCpy, "Number of memcpys formed from loop load+stores");
STATISTIC(NumMemMove, "Number of memmoves formed from loop load+stores");

namespace {
  class LoopIdiomRecognize : public LoopPass {
    Loop *CurLoop;
    const DataLayout *TD;
    DominatorTree *DT;
    ScalarEvolution *SE;
    TargetLibraryInfo *TLI;
  public:
    static char ID;
    explicit LoopIdiomRecognize() : LoopPass(ID) {
      initializeLoopIdiomRecognizePass(*PassRegistry::getPassRegistry());
    }

    bool runOnLoop(Loop *L, LPPassManager &LPM);
    bool runOnLoopBlock(BasicBlock *BB, const SCEV *BECount,
                        SmallVectorImpl<BasicBlock*> &ExitBlocks);

    bool processLoopStore(StoreInst *SI, const SCEV *BECount);
    bool processLoopMemSet(MemSetInst *MSI, const SCEV *BECount);

    bool processLoopStridedStore(Value *DestPtr, unsigned StoreSize,
                                 unsigned StoreAlignment,
                                 Value *SplatValue, Instruction *TheStore,
                                 const SCEVAddRecExpr *Ev,
                                 const SCEV *BECount);
    bool processLoopStoreOfLoopLoad(StoreInst *SI, unsigned StoreSize,
                                    const SCEVAddRecExpr *StoreEv,
                                    const SCEVAddRecExpr *LoadEv,
                                    const SCEV *BECount);

    /// This transformation requires natural loop information & requires that
    /// loop preheaders be inserted into the CFG.
    ///
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<LoopInfo>();
      AU.addPreserved<LoopInfo>();
      AU.addRequiredID(LoopSimplifyID);
      AU.addPreservedID(LoopSimplifyID);
      AU.addRequiredID(LCSSAID);
      AU.addPreservedID(LCSSAID);
      AU.addRequired<AliasAnalysis>();
      AU.addPreserved<AliasAnalysis>();
      AU.addRequired<ScalarEvolution>();
      AU.addPreserved<ScalarEvolution>();
      AU.addRequired<DependenceAnalysis>();
      AU.addPreserved<DependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addRequired<DominatorTree>();
      AU.addRequired<TargetLibraryInfo>();
    }
  };
}

char LoopIdiomRecognize::ID = 0;
INITIALIZE_PASS_BEGIN(LoopIdiomRecognize, "loop-idiom", "Recognize loop idioms",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(LoopInfo)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(LoopSimplify)
INITIALIZE_PASS_DEPENDENCY(LCSSA)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolution)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfo)
INITIALIZE_PASS_DEPENDENCY(DependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(LoopIdiomRecognize, "loop-idiom", "Recognize loop idioms",
                    false, false)

Pass *llvm::createLoopIdiomPass() { return new LoopIdiomRecognize(); }

/// deleteDeadInstruction - Delete this instruction. Before we do, go through
/// and zero out all the operands of this instruction. If any of them become
/// dead, delete them and the computation tree that feeds them.
///
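/// For illustration (a sketch, not tied to any particular input): deleting a
/// dead "store i32 %v, i32* %p" whose value "%v = add i32 %t, 1" has no other
/// uses also deletes the add, and then the instruction defining %t if that in
/// turn becomes trivially dead, walking the whole feeding computation tree.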
static void deleteDeadInstruction(Instruction *I, ScalarEvolution &SE,
                                  const TargetLibraryInfo *TLI) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);

  // Before we touch this instruction, remove it from SE!
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    // This instruction is dead, zap it, in stages. Start by removing it from
    // SCEV.
    SE.forgetValue(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI, TLI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

  } while (!NowDeadInsts.empty());
}

bool LoopIdiomRecognize::runOnLoop(Loop *L, LPPassManager &LPM) {
  CurLoop = L;

  // If the loop could not be converted to canonical form, it must have an
  // indirectbr in it; just give up.
  if (!L->getLoopPreheader())
    return false;

  // Disable loop idiom recognition if the function's name is a common idiom.
  StringRef Name = L->getHeader()->getParent()->getName();
  if (Name == "memset" || Name == "memcpy" || Name == "memmove")
    return false;

  // The trip count of the loop must be analyzable.
  SE = &getAnalysis<ScalarEvolution>();
  if (!SE->hasLoopInvariantBackedgeTakenCount(L))
    return false;
  const SCEV *BECount = SE->getBackedgeTakenCount(L);
  if (isa<SCEVCouldNotCompute>(BECount)) return false;

  // If this loop executes exactly one time, then it should be peeled, not
  // optimized by this pass.
  if (const SCEVConstant *BECst = dyn_cast<SCEVConstant>(BECount))
    if (BECst->getValue()->getValue() == 0)
      return false;

  // We require target data for now.
  TD = getAnalysisIfAvailable<DataLayout>();
  if (TD == 0) return false;

  DT = &getAnalysis<DominatorTree>();
  LoopInfo &LI = getAnalysis<LoopInfo>();
  TLI = &getAnalysis<TargetLibraryInfo>();

  SmallVector<BasicBlock*, 8> ExitBlocks;
  CurLoop->getUniqueExitBlocks(ExitBlocks);

  DEBUG(dbgs() << "loop-idiom Scanning: F["
               << L->getHeader()->getParent()->getName()
               << "] Loop %" << L->getHeader()->getName() << "\n");

  bool MadeChange = false;
  // Scan all the blocks in the loop that are not in subloops.
  for (Loop::block_iterator BI = L->block_begin(), E = L->block_end(); BI != E;
       ++BI) {
    // Ignore blocks in subloops.
    if (LI.getLoopFor(*BI) != CurLoop)
      continue;

    MadeChange |= runOnLoopBlock(*BI, BECount, ExitBlocks);
  }
  return MadeChange;
}

/// runOnLoopBlock - Process the specified block, which lives in a counted loop
/// with the specified backedge count. This block is known to be in the current
/// loop and not in any subloops.
bool LoopIdiomRecognize::runOnLoopBlock(BasicBlock *BB, const SCEV *BECount,
                                        SmallVectorImpl<BasicBlock*> &ExitBlocks) {
  // We can only promote stores in this block if they are unconditionally
  // executed in the loop. For a block to be unconditionally executed, it has
  // to dominate all the exit blocks of the loop. Verify this now.
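  // For example (a sketch), in
  //   for (i) { if (cond) p[i] = 0; }
  // the conditional store sits in a block that does not dominate the loop
  // exits, so it is rejected here instead of being promoted to a memset.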
  for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i)
    if (!DT->dominates(BB, ExitBlocks[i]))
      return false;

  bool MadeChange = false;
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ) {
    Instruction *Inst = I++;
    // Look for store instructions, which may be optimized to memset/memcpy.
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      WeakVH InstPtr(I);
      if (!processLoopStore(SI, BECount)) continue;
      MadeChange = true;

      // If processing the store invalidated our iterator, start over from the
      // top of the block.
      if (InstPtr == 0)
        I = BB->begin();
      continue;
    }

    // Look for memset instructions, which may be optimized to a larger memset.
    if (MemSetInst *MSI = dyn_cast<MemSetInst>(Inst)) {
      WeakVH InstPtr(I);
      if (!processLoopMemSet(MSI, BECount)) continue;
      MadeChange = true;

      // If processing the memset invalidated our iterator, start over from the
      // top of the block.
      if (InstPtr == 0)
        I = BB->begin();
      continue;
    }
  }

  return MadeChange;
}


/// processLoopStore - See if this store can be promoted to a memset or memcpy.
bool LoopIdiomRecognize::processLoopStore(StoreInst *SI, const SCEV *BECount) {
  if (!SI->isSimple()) return false;

  Value *StoredVal = SI->getValueOperand();
  Value *StorePtr = SI->getPointerOperand();

  // Reject stores that are so large that they overflow an unsigned.
  uint64_t SizeInBits = TD->getTypeSizeInBits(StoredVal->getType());
  if ((SizeInBits & 7) || (SizeInBits >> 32) != 0)
    return false;

  // See if the pointer expression is an AddRec like {base,+,1} on the current
  // loop, which indicates a strided store. If we have something else, it's a
  // random store we can't handle.
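  // For example (a sketch, assuming i32 elements), in
  //   for (i = 0; i != n; ++i) p[i] = v;
  // the store pointer's SCEV is the affine AddRec {%p,+,4}<%loop>, whose step
  // (operand 1) is the 4-byte stride compared against the store size below.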
  const SCEVAddRecExpr *StoreEv =
    dyn_cast<SCEVAddRecExpr>(SE->getSCEV(StorePtr));
  if (StoreEv == 0 || StoreEv->getLoop() != CurLoop || !StoreEv->isAffine())
    return false;

  // Check to see if the stride matches the size of the store. If so, then we
  // know that every byte is touched in the loop.
  unsigned StoreSize = (unsigned)SizeInBits >> 3;
  const SCEVConstant *Stride = dyn_cast<SCEVConstant>(StoreEv->getOperand(1));

  if (Stride == 0 || StoreSize != Stride->getValue()->getValue()) {
    // TODO: Could also handle negative stride here someday, that will require
    // the validity check in mayLoopAccessLocation to be updated though.
    // Enable this to print exact negative strides.
    if (0 && Stride && StoreSize == -Stride->getValue()->getValue()) {
      dbgs() << "NEGATIVE STRIDE: " << *SI << "\n";
      dbgs() << "BB: " << *SI->getParent();
    }

    return false;
  }

  // See if we can optimize just this store in isolation.
  if (processLoopStridedStore(StorePtr, StoreSize, SI->getAlignment(),
                              StoredVal, SI, StoreEv, BECount))
    return true;

  // If the stored value is a strided load in the same loop with the same
  // stride, this may be transformable into a memcpy. This kicks in for stuff
  // like:
  //   for (i) A[i] = B[i];
  if (LoadInst *LI = dyn_cast<LoadInst>(StoredVal)) {
    const SCEVAddRecExpr *LoadEv =
      dyn_cast<SCEVAddRecExpr>(SE->getSCEV(LI->getOperand(0)));
    if (LoadEv && LoadEv->getLoop() == CurLoop && LoadEv->isAffine() &&
        StoreEv->getOperand(1) == LoadEv->getOperand(1) && LI->isSimple())
      if (processLoopStoreOfLoopLoad(SI, StoreSize, StoreEv, LoadEv, BECount))
        return true;
  }
  //errs() << "UNHANDLED strided store: " << *StoreEv << " - " << *SI << "\n";

  return false;
}

/// processLoopMemSet - See if this memset can be promoted to a large memset.
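/// For example (a sketch), a loop such as
///   for (i = 0; i != n; ++i) memset(&p[i*16], 0, 16);
/// whose per-iteration length equals the pointer stride can be promoted to a
/// single memset of 16*n bytes emitted in the loop preheader.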
| 330 | bool LoopIdiomRecognize:: |
| 331 | processLoopMemSet(MemSetInst *MSI, const SCEV *BECount) { |
| 332 | // We can only handle non-volatile memsets with a constant size. |
| 333 | if (MSI->isVolatile() || !isa<ConstantInt>(MSI->getLength())) return false; |
| 334 | |
Chris Lattner | c19175c | 2011-02-18 22:22:15 +0000 | [diff] [blame] | 335 | // If we're not allowed to hack on memset, we fail. |
| 336 | if (!TLI->has(LibFunc::memset)) |
| 337 | return false; |
Andrew Trick | d99b39e | 2011-03-14 16:48:10 +0000 | [diff] [blame] | 338 | |
Chris Lattner | e41d3c0 | 2011-01-04 07:46:33 +0000 | [diff] [blame] | 339 | Value *Pointer = MSI->getDest(); |
Andrew Trick | d99b39e | 2011-03-14 16:48:10 +0000 | [diff] [blame] | 340 | |
Chris Lattner | e41d3c0 | 2011-01-04 07:46:33 +0000 | [diff] [blame] | 341 | // See if the pointer expression is an AddRec like {base,+,1} on the current |
| 342 | // loop, which indicates a strided store. If we have something else, it's a |
| 343 | // random store we can't handle. |
| 344 | const SCEVAddRecExpr *Ev = dyn_cast<SCEVAddRecExpr>(SE->getSCEV(Pointer)); |
| 345 | if (Ev == 0 || Ev->getLoop() != CurLoop || !Ev->isAffine()) |
| 346 | return false; |
| 347 | |
| 348 | // Reject memsets that are so large that they overflow an unsigned. |
| 349 | uint64_t SizeInBytes = cast<ConstantInt>(MSI->getLength())->getZExtValue(); |
| 350 | if ((SizeInBytes >> 32) != 0) |
| 351 | return false; |
Andrew Trick | d99b39e | 2011-03-14 16:48:10 +0000 | [diff] [blame] | 352 | |
Chris Lattner | e41d3c0 | 2011-01-04 07:46:33 +0000 | [diff] [blame] | 353 | // Check to see if the stride matches the size of the memset. If so, then we |
| 354 | // know that every byte is touched in the loop. |
| 355 | const SCEVConstant *Stride = dyn_cast<SCEVConstant>(Ev->getOperand(1)); |
Andrew Trick | d99b39e | 2011-03-14 16:48:10 +0000 | [diff] [blame] | 356 | |
Chris Lattner | e41d3c0 | 2011-01-04 07:46:33 +0000 | [diff] [blame] | 357 | // TODO: Could also handle negative stride here someday, that will require the |
| 358 | // validity check in mayLoopAccessLocation to be updated though. |
| 359 | if (Stride == 0 || MSI->getLength() != Stride->getValue()) |
| 360 | return false; |
Andrew Trick | d99b39e | 2011-03-14 16:48:10 +0000 | [diff] [blame] | 361 | |
Chris Lattner | 3a39372 | 2011-02-19 19:31:39 +0000 | [diff] [blame] | 362 | return processLoopStridedStore(Pointer, (unsigned)SizeInBytes, |
| 363 | MSI->getAlignment(), MSI->getValue(), |
| 364 | MSI, Ev, BECount); |
Chris Lattner | e41d3c0 | 2011-01-04 07:46:33 +0000 | [diff] [blame] | 365 | } |
| 366 | |
Chris Lattner | 3a39372 | 2011-02-19 19:31:39 +0000 | [diff] [blame] | 367 | /// getMemSetPatternValue - If a strided store of the specified value is safe to |
| 368 | /// turn into a memset_pattern16, return a ConstantArray of 16 bytes that should |
| 369 | /// be passed in. Otherwise, return null. |
| 370 | /// |
| 371 | /// Note that we don't ever attempt to use memset_pattern8 or 4, because these |
| 372 | /// just replicate their input array and then pass on to memset_pattern16. |
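/// For example (a sketch): a strided store of the constant i32 0x01020304
/// produces a [4 x i32] array holding four copies of that constant, i.e. the
/// 16-byte pattern that memset_pattern16 takes as its second argument.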
static Constant *getMemSetPatternValue(Value *V, const DataLayout &TD) {
  // If the value isn't a constant, we can't promote it to being in a constant
  // array. We could theoretically do a store to an alloca or something, but
  // that doesn't seem worthwhile.
  Constant *C = dyn_cast<Constant>(V);
  if (C == 0) return 0;

  // Only handle simple values that are a power of two bytes in size.
  uint64_t Size = TD.getTypeSizeInBits(V->getType());
  if (Size == 0 || (Size & 7) || (Size & (Size-1)))
    return 0;

  // Don't care enough about darwin/ppc to implement this.
  if (TD.isBigEndian())
    return 0;

  // Convert to size in bytes.
  Size /= 8;

  // TODO: If CI is larger than 16-bytes, we can try slicing it in half to see
  // if the top and bottom are the same (e.g. for vectors and large integers).
  if (Size > 16) return 0;

  // If the constant is exactly 16 bytes, just use it.
  if (Size == 16) return C;

  // Otherwise, we'll use an array of the constants.
  unsigned ArraySize = 16/Size;
  ArrayType *AT = ArrayType::get(V->getType(), ArraySize);
  return ConstantArray::get(AT, std::vector<Constant*>(ArraySize, C));
}


/// processLoopStridedStore - We see a strided store of some value. If we can
/// transform this into a memset or memset_pattern in the loop preheader, do so.
bool LoopIdiomRecognize::
processLoopStridedStore(Value *DestPtr, unsigned StoreSize,
                        unsigned StoreAlignment, Value *StoredVal,
                        Instruction *TheStore, const SCEVAddRecExpr *Ev,
                        const SCEV *BECount) {

  // If the stored value is a byte-wise value (like i32 -1), then it may be
  // turned into a memset of i8 -1, assuming that all the consecutive bytes
  // are stored. A store of i32 0x01020304 can never be turned into a memset,
  // but it can be turned into memset_pattern if the target supports it.
  Value *SplatValue = isBytewiseValue(StoredVal);
  Constant *PatternValue = 0;

  // If we're allowed to form a memset, and the stored value would be acceptable
  // for memset, use it.
  if (SplatValue && TLI->has(LibFunc::memset) &&
      // Verify that the stored value is loop invariant. If not, we can't
      // promote the memset.
      CurLoop->isLoopInvariant(SplatValue)) {
    // Keep and use SplatValue.
    PatternValue = 0;
  } else if (TLI->has(LibFunc::memset_pattern16) &&
             (PatternValue = getMemSetPatternValue(StoredVal, *TD))) {
    // It looks like we can use PatternValue!
    SplatValue = 0;
  } else {
    // Otherwise, this isn't an idiom we can transform. For example, we can't
    // do anything with a 3-byte store.
    return false;
  }

  // Make sure the store has no dependencies (i.e. other loads and stores) in
  // the loop.
  DependenceAnalysis &DA = getAnalysis<DependenceAnalysis>();
  for (Loop::block_iterator BI = CurLoop->block_begin(),
         BE = CurLoop->block_end(); BI != BE; ++BI)
    for (BasicBlock::iterator I = (*BI)->begin(), E = (*BI)->end(); I != E; ++I)
      if (&*I != TheStore && I->mayReadOrWriteMemory()) {
        OwningPtr<Dependence> D(DA.depends(TheStore, I, true));
        if (D)
          return false;
      }

  // The trip count of the loop and the base pointer of the addrec SCEV are
  // guaranteed to be loop invariant, which means that they should dominate the
  // header. This allows us to insert code for them in the preheader.
  BasicBlock *Preheader = CurLoop->getLoopPreheader();
  IRBuilder<> Builder(Preheader->getTerminator());
  SCEVExpander Expander(*SE, "loop-idiom");

  // Okay, we have a strided store "p[i]" of a splattable value. We can turn
  // this into a memset in the loop preheader now if we want. However, this
  // would be unsafe to do if there is anything else in the loop that may read
  // or write to the aliased location. Check for any overlap by generating the
  // base pointer and checking the region.
  unsigned AddrSpace = cast<PointerType>(DestPtr->getType())->getAddressSpace();
  Value *BasePtr =
    Expander.expandCodeFor(Ev->getStart(), Builder.getInt8PtrTy(AddrSpace),
                           Preheader->getTerminator());

  // Okay, everything looks good, insert the memset.

  // The # stored bytes is (BECount+1)*Size. Expand the trip count out to
  // pointer size if it isn't already.
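  // For instance (a sketch), a loop that stores i32s and runs %n times has a
  // backedge-taken count of %n-1, so NumBytes below expands to (%n-1+1)*4,
  // i.e. 4*%n bytes.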
  Type *IntPtr = TD->getIntPtrType(DestPtr->getContext());
  BECount = SE->getTruncateOrZeroExtend(BECount, IntPtr);

  const SCEV *NumBytesS = SE->getAddExpr(BECount, SE->getConstant(IntPtr, 1),
                                         SCEV::FlagNUW);
  if (StoreSize != 1)
    NumBytesS = SE->getMulExpr(NumBytesS, SE->getConstant(IntPtr, StoreSize),
                               SCEV::FlagNUW);

  Value *NumBytes =
    Expander.expandCodeFor(NumBytesS, IntPtr, Preheader->getTerminator());

  CallInst *NewCall;
  if (SplatValue)
    NewCall = Builder.CreateMemSet(BasePtr, SplatValue, NumBytes, StoreAlignment);
  else {
    Module *M = TheStore->getParent()->getParent()->getParent();
    Value *MSP = M->getOrInsertFunction("memset_pattern16",
                                        Builder.getVoidTy(),
                                        Builder.getInt8PtrTy(),
                                        Builder.getInt8PtrTy(), IntPtr,
                                        (void*)0);

    // Otherwise we should form a memset_pattern16. PatternValue is known to be
    // a constant array of 16 bytes. Plop the value into a mergeable global.
    GlobalVariable *GV = new GlobalVariable(*M, PatternValue->getType(), true,
                                            GlobalValue::InternalLinkage,
                                            PatternValue, ".memset_pattern");
    GV->setUnnamedAddr(true); // Ok to merge these.
    GV->setAlignment(16);
    Value *PatternPtr = ConstantExpr::getBitCast(GV, Builder.getInt8PtrTy());
    NewCall = Builder.CreateCall3(MSP, BasePtr, PatternPtr, NumBytes);
  }

  DEBUG(dbgs() << "  Formed memset: " << *NewCall << "\n"
               << "    from store to: " << *Ev << " at: " << *TheStore << "\n");
  NewCall->setDebugLoc(TheStore->getDebugLoc());

  // Okay, the memset has been formed. Zap the original store and anything that
  // feeds into it.
  deleteDeadInstruction(TheStore, *SE, TLI);
  ++NumMemSet;
  return true;
}

/// processLoopStoreOfLoopLoad - We see a strided store whose value is a
/// same-strided load.
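/// For example (a sketch), when A and B are disjoint,
///   for (i) A[i] = B[i];
/// becomes a single memcpy of the whole range in the loop preheader, and the
/// original store (plus its now-dead feeding load) is deleted.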
bool LoopIdiomRecognize::
processLoopStoreOfLoopLoad(StoreInst *SI, unsigned StoreSize,
                           const SCEVAddRecExpr *StoreEv,
                           const SCEVAddRecExpr *LoadEv,
                           const SCEV *BECount) {
  // If we're not allowed to form memcpy or memmove, we fail.
  if (!TLI->has(LibFunc::memcpy) || !TLI->has(LibFunc::memmove))
    return false;

  LoadInst *LI = cast<LoadInst>(SI->getValueOperand());

  // Make sure the load and the store have no dependencies (i.e. other loads and
  // stores) in the loop. We ignore the direct dependency between SI and LI here
  // and check it later.
  DependenceAnalysis &DA = getAnalysis<DependenceAnalysis>();
  bool isMemcpySafe = true;
  for (Loop::block_iterator BI = CurLoop->block_begin(),
         BE = CurLoop->block_end(); BI != BE; ++BI)
    for (BasicBlock::iterator I = (*BI)->begin(), E = (*BI)->end(); I != E; ++I)
      if (&*I != SI && &*I != LI && I->mayReadOrWriteMemory()) {
        // First, check if there is a dependence on the store.
        OwningPtr<Dependence> DS(DA.depends(SI, I, true));
        if (DS)
          return false;
        // If the scanned instruction may modify memory then we also have to
        // check for dependencies on the load.
        if (I->mayWriteToMemory()) {
          OwningPtr<Dependence> DL(DA.depends(I, LI, true));
          if (DL)
            return false;
        }
      }

  // Now check the dependency between SI and LI. If there is no dependency we
  // can safely emit a memcpy.
  OwningPtr<Dependence> Dep(DA.depends(SI, LI, true));
  if (Dep) {
    // If there is a dependence but the direction is positive (or none) we can
    // still safely turn this into memmove.
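    // For example (a sketch), an overlapping copy such as
    //   for (i = 0; i < n; ++i) p[i] = p[i+1];
    // reads every element before any later iteration overwrites it, so memmove
    // (though not memcpy) preserves its behavior; the direction test below is
    // what decides whether a given dependence is of that benign kind.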
    unsigned Direction = Dep->getDirection(Dep->getLevels());
    if (Direction != Dependence::DVEntry::NONE &&
        Direction != Dependence::DVEntry::GT)
      return false;
    isMemcpySafe = false;
  }

  // The trip count of the loop and the base pointer of the addrec SCEV are
  // guaranteed to be loop invariant, which means that they should dominate the
  // header. This allows us to insert code for them in the preheader.
  BasicBlock *Preheader = CurLoop->getLoopPreheader();
  IRBuilder<> Builder(Preheader->getTerminator());
  SCEVExpander Expander(*SE, "loop-idiom");

  // Okay, we have a strided store "p[i]" of a loaded value. We can turn
  // this into a memcpy in the loop preheader now if we want.
  Value *StoreBasePtr =
    Expander.expandCodeFor(StoreEv->getStart(),
                           Builder.getInt8PtrTy(SI->getPointerAddressSpace()),
                           Preheader->getTerminator());
  Value *LoadBasePtr =
    Expander.expandCodeFor(LoadEv->getStart(),
                           Builder.getInt8PtrTy(LI->getPointerAddressSpace()),
                           Preheader->getTerminator());

  // Okay, everything is safe, we can transform this!

  // The # stored bytes is (BECount+1)*Size. Expand the trip count out to
  // pointer size if it isn't already.
  Type *IntPtr = TD->getIntPtrType(SI->getContext());
  BECount = SE->getTruncateOrZeroExtend(BECount, IntPtr);

  const SCEV *NumBytesS = SE->getAddExpr(BECount, SE->getConstant(IntPtr, 1),
                                         SCEV::FlagNUW);
  if (StoreSize != 1)
    NumBytesS = SE->getMulExpr(NumBytesS, SE->getConstant(IntPtr, StoreSize),
                               SCEV::FlagNUW);

  Value *NumBytes =
    Expander.expandCodeFor(NumBytesS, IntPtr, Preheader->getTerminator());

  CallInst *NewCall;
  unsigned Align = std::min(SI->getAlignment(), LI->getAlignment());
  if (isMemcpySafe) {
    NewCall = Builder.CreateMemCpy(StoreBasePtr, LoadBasePtr, NumBytes, Align);
    ++NumMemCpy;
  } else {
    NewCall = Builder.CreateMemMove(StoreBasePtr, LoadBasePtr, NumBytes, Align);
    ++NumMemMove;
  }
  NewCall->setDebugLoc(SI->getDebugLoc());

  DEBUG(dbgs() << "  Formed " << (isMemcpySafe ? "memcpy: " : "memmove: ")
               << *NewCall << "\n"
               << "    from load ptr=" << *LoadEv << " at: " << *LI << "\n"
               << "    from store ptr=" << *StoreEv << " at: " << *SI << "\n");

  // Okay, the memcpy (or memmove) has been formed. Zap the original store and
  // anything that feeds into it.
  deleteDeadInstruction(SI, *SE, TLI);
  return true;
}