//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
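// As a minimal illustration of the block-local case this pass handles
// (hypothetical IR, not taken from this file), the first store below is dead
// because the second store overwrites the same location before any
// intervening read:
//
//   store i32 1, i32* %p
//   store i32 2, i32* %p
//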
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

#define DEBUG_TYPE "dse"

STATISTIC(NumRedundantStores, "Number of redundant stores deleted");
STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther, "Number of other instrs removed");

namespace {
  struct DSE : public FunctionPass {
    AliasAnalysis *AA;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;
    const TargetLibraryInfo *TLI;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID), AA(nullptr), MD(nullptr), DT(nullptr) {
      initializeDSEPass(*PassRegistry::getPassRegistry());
    }

    bool runOnFunction(Function &F) override {
      if (skipOptnoneFunction(F))
        return false;

      AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
      MD = &getAnalysis<MemoryDependenceAnalysis>();
      DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
      TLI = &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();

      bool Changed = false;
      for (BasicBlock &I : F)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT->isReachableFromEntry(&I))
          Changed |= runOnBasicBlock(I);

      AA = nullptr; MD = nullptr; DT = nullptr;
      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool MemoryIsNotModifiedBetween(Instruction *FirstI, Instruction *SecondI);
    bool HandleFree(CallInst *F);
    bool handleEndBlock(BasicBlock &BB);
    void RemoveAccessedObjects(const MemoryLocation &LoadedLoc,
                               SmallSetVector<Value *, 16> &DeadStackObjects,
                               const DataLayout &DL);

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTreeWrapperPass>();
      AU.addRequired<AAResultsWrapperPass>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<TargetLibraryInfoWrapperPass>();
      AU.addPreserved<DominatorTreeWrapperPass>();
      AU.addPreserved<GlobalsAAWrapperPass>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(GlobalsAAWrapperPass)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }

//===----------------------------------------------------------------------===//
// Helper functions
//===----------------------------------------------------------------------===//

/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
///
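/// As an illustrative example (hypothetical IR, not taken from this file):
/// deleting a dead "store i32 0, i32* %gep" whose address %gep is an otherwise
/// unused getelementptr also deletes the getelementptr, because zeroing the
/// store's operands leaves it trivially dead.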
static void DeleteDeadInstruction(Instruction *I,
                                  MemoryDependenceAnalysis &MD,
                                  const TargetLibraryInfo &TLI,
                                  SmallSetVector<Value*, 16> *ValueSet = nullptr) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();
    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MD.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, nullptr);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI, &TLI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->remove(DeadInst);
  } while (!NowDeadInsts.empty());
}


/// hasMemoryWrite - Does this instruction write some memory?  This only returns
/// true for things that we can analyze with other helpers below.
static bool hasMemoryWrite(Instruction *I, const TargetLibraryInfo &TLI) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  if (auto CS = CallSite(I)) {
    if (Function *F = CS.getCalledFunction()) {
      if (TLI.has(LibFunc::strcpy) &&
          F->getName() == TLI.getName(LibFunc::strcpy)) {
        return true;
      }
      if (TLI.has(LibFunc::strncpy) &&
          F->getName() == TLI.getName(LibFunc::strncpy)) {
        return true;
      }
      if (TLI.has(LibFunc::strcat) &&
          F->getName() == TLI.getName(LibFunc::strcat)) {
        return true;
      }
      if (TLI.has(LibFunc::strncat) &&
          F->getName() == TLI.getName(LibFunc::strncat)) {
        return true;
      }
    }
  }
  return false;
}

/// getLocForWrite - Return a Location stored to by the specified instruction.
/// If isRemovable returns true, this function and getLocForRead completely
/// describe the memory operations for this instruction.
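/// As an illustrative example: for "store i32 0, i32* %p" this is the location
/// of %p with a size of 4 bytes, and for a memset it is the destination with
/// the memset's length when that length is a known constant.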
static MemoryLocation getLocForWrite(Instruction *Inst, AliasAnalysis &AA) {
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
    return MemoryLocation::get(SI);

  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(Inst)) {
    // memcpy/memmove/memset.
    MemoryLocation Loc = MemoryLocation::getForDest(MI);
    return Loc;
  }

  IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst);
  if (!II)
    return MemoryLocation();

  switch (II->getIntrinsicID()) {
  default:
    return MemoryLocation(); // Unhandled intrinsic.
  case Intrinsic::init_trampoline:
    // FIXME: We don't know the size of the trampoline, so we can't really
    // handle it here.
    return MemoryLocation(II->getArgOperand(0));
  case Intrinsic::lifetime_end: {
    uint64_t Len = cast<ConstantInt>(II->getArgOperand(0))->getZExtValue();
    return MemoryLocation(II->getArgOperand(1), Len);
  }
  }
}

/// getLocForRead - Return the location read by the specified "hasMemoryWrite"
/// instruction if any.
static MemoryLocation getLocForRead(Instruction *Inst,
                                    const TargetLibraryInfo &TLI) {
  assert(hasMemoryWrite(Inst, TLI) && "Unknown instruction case");

  // The only instructions that both read and write are the mem transfer
  // instructions (memcpy/memmove).
  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(Inst))
    return MemoryLocation::getForSource(MTI);
  return MemoryLocation();
}


/// isRemovable - If the value of this instruction and the memory it writes to
/// is unused, may we delete this instruction?
static bool isRemovable(Instruction *I) {
  // Don't remove volatile/atomic stores.
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isUnordered();

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: llvm_unreachable("doesn't pass 'hasMemoryWrite' predicate");
    case Intrinsic::lifetime_end:
      // Never remove dead lifetime_end's, e.g. because it is followed by a
      // free.
      return false;
    case Intrinsic::init_trampoline:
      // Always safe to remove init_trampoline.
      return true;

    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
      // Don't remove volatile memory intrinsics.
      return !cast<MemIntrinsic>(II)->isVolatile();
    }
  }

  if (auto CS = CallSite(I))
    return CS.getInstruction()->use_empty();

  return false;
}


/// isShortenable - Returns true if this instruction can be safely shortened in
/// length.
static bool isShortenable(Instruction *I) {
  // Don't shorten stores for now.
  if (isa<StoreInst>(I))
    return false;

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: return false;
    case Intrinsic::memset:
    case Intrinsic::memcpy:
      // Do shorten memory intrinsics.
      return true;
    }
  }

  // Don't shorten library calls for now.

  return false;
}

/// getStoredPointerOperand - Return the pointer that is being written to.
static Value *getStoredPointerOperand(Instruction *I) {
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getDest();

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: llvm_unreachable("Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return II->getArgOperand(0);
    }
  }

  CallSite CS(I);
  // All the supported functions so far happen to have dest as their first
  // argument.
  return CS.getArgument(0);
}

static uint64_t getPointerSize(const Value *V, const DataLayout &DL,
                               const TargetLibraryInfo &TLI) {
  uint64_t Size;
  if (getObjectSize(V, Size, DL, &TLI))
    return Size;
  return MemoryLocation::UnknownSize;
}

namespace {
  enum OverwriteResult {
    OverwriteComplete,
    OverwriteEnd,
    OverwriteUnknown
  };
}

/// isOverwrite - Return 'OverwriteComplete' if a store to the 'Later' location
/// completely overwrites a store to the 'Earlier' location, 'OverwriteEnd' if
/// the end of the 'Earlier' location is completely overwritten by 'Later', or
/// 'OverwriteUnknown' if nothing can be determined.
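///
/// As an illustrative example (offsets in bytes from a common base): an
/// earlier 4-byte store at offset 4 is OverwriteComplete with respect to a
/// later 16-byte store at offset 0, while an earlier 8-byte store at offset 0
/// with a later 8-byte store at offset 4 yields OverwriteEnd.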
static OverwriteResult isOverwrite(const MemoryLocation &Later,
                                   const MemoryLocation &Earlier,
                                   const DataLayout &DL,
                                   const TargetLibraryInfo &TLI,
                                   int64_t &EarlierOff, int64_t &LaterOff) {
  const Value *P1 = Earlier.Ptr->stripPointerCasts();
  const Value *P2 = Later.Ptr->stripPointerCasts();

  // If the start pointers are the same, we just have to compare sizes to see if
  // the later store was larger than the earlier store.
  if (P1 == P2) {
    // If we don't know the sizes of either access, then we can't do a
    // comparison.
    if (Later.Size == MemoryLocation::UnknownSize ||
        Earlier.Size == MemoryLocation::UnknownSize)
      return OverwriteUnknown;

    // Make sure that the Later size is >= the Earlier size.
    if (Later.Size >= Earlier.Size)
      return OverwriteComplete;
  }

  // Otherwise, we have to have size information, and the later store has to be
  // larger than the earlier one.
  if (Later.Size == MemoryLocation::UnknownSize ||
      Earlier.Size == MemoryLocation::UnknownSize)
    return OverwriteUnknown;

  // Check to see if the later store is to the entire object (either a global,
  // an alloca, or a byval/inalloca argument).  If so, then it clearly
  // overwrites any other store to the same object.
  const Value *UO1 = GetUnderlyingObject(P1, DL),
              *UO2 = GetUnderlyingObject(P2, DL);

  // If we can't resolve the same pointers to the same object, then we can't
  // analyze them at all.
  if (UO1 != UO2)
    return OverwriteUnknown;

  // If the "Later" store is to a recognizable object, get its size.
  uint64_t ObjectSize = getPointerSize(UO2, DL, TLI);
  if (ObjectSize != MemoryLocation::UnknownSize)
    if (ObjectSize == Later.Size && ObjectSize >= Earlier.Size)
      return OverwriteComplete;

  // Okay, we have stores to two completely different pointers.  Try to
  // decompose the pointer into a "base + constant_offset" form.  If the base
  // pointers are equal, then we can reason about the two stores.
  EarlierOff = 0;
  LaterOff = 0;
  const Value *BP1 = GetPointerBaseWithConstantOffset(P1, EarlierOff, DL);
  const Value *BP2 = GetPointerBaseWithConstantOffset(P2, LaterOff, DL);

  // If the base pointers still differ, we have two completely different stores.
  if (BP1 != BP2)
    return OverwriteUnknown;

  // The later store completely overlaps the earlier store if:
  //
  // 1. Both start at the same offset and the later one's size is greater than
  //    or equal to the earlier one's, or
  //
  //      |--earlier--|
  //      |--   later   --|
  //
  // 2. The earlier store has an offset greater than the later offset, but which
  //    still lies completely within the later store.
  //
  //        |--earlier--|
  //    |-----  later  ------|
  //
  // We have to be careful here as *Off is signed while *.Size is unsigned.
  if (EarlierOff >= LaterOff &&
      Later.Size >= Earlier.Size &&
      uint64_t(EarlierOff - LaterOff) + Earlier.Size <= Later.Size)
    return OverwriteComplete;

  // The other interesting case is if the later store overwrites the end of
  // the earlier store.
  //
  //      |--earlier--|
  //                |--   later   --|
  //
  // In this case we may want to trim the size of earlier to avoid generating
  // writes to addresses which will definitely be overwritten later.
  if (LaterOff > EarlierOff &&
      LaterOff < int64_t(EarlierOff + Earlier.Size) &&
      int64_t(LaterOff + Later.Size) >= int64_t(EarlierOff + Earlier.Size))
    return OverwriteEnd;

  // Otherwise, they don't completely overlap.
  return OverwriteUnknown;
}

/// isPossibleSelfRead - If 'Inst' might be a self read (i.e. a noop copy of a
/// memory region into an identical pointer) then it doesn't actually make its
/// input dead in the traditional sense.  Consider this case:
///
///   memcpy(A <- B)
///   memcpy(A <- A)
///
/// In this case, the second store to A does not make the first store to A dead.
/// The usual situation isn't an explicit A<-A store like this (which can be
/// trivially removed) but a case where two pointers may alias.
///
/// This function detects when it is unsafe to remove a dependent instruction
/// because the DSE inducing instruction may be a self-read.
static bool isPossibleSelfRead(Instruction *Inst,
                               const MemoryLocation &InstStoreLoc,
                               Instruction *DepWrite,
                               const TargetLibraryInfo &TLI,
                               AliasAnalysis &AA) {
  // Self reads can only happen for instructions that read memory.  Get the
  // location read.
  MemoryLocation InstReadLoc = getLocForRead(Inst, TLI);
  if (!InstReadLoc.Ptr) return false;  // Not a reading instruction.

  // If the read and written loc obviously don't alias, it isn't a read.
  if (AA.isNoAlias(InstReadLoc, InstStoreLoc)) return false;

  // Okay, 'Inst' may copy over itself.  However, we can still remove the
  // DepWrite instruction if we can prove that it reads from the same location
  // as Inst.  This handles useful cases like:
  //    memcpy(A <- B)
  //    memcpy(A <- B)
  // Here we don't know if A/B may alias, but we do know that B/B are must
  // aliases, so removing the first memcpy is safe (assuming it writes <= #
  // bytes as the second one).
  MemoryLocation DepReadLoc = getLocForRead(DepWrite, TLI);

  if (DepReadLoc.Ptr && AA.isMustAlias(InstReadLoc.Ptr, DepReadLoc.Ptr))
    return false;

  // If DepWrite doesn't read memory or if we can't prove it is a must alias,
  // then it can't be considered dead.
  return true;
}


//===----------------------------------------------------------------------===//
// DSE Pass
//===----------------------------------------------------------------------===//

bool DSE::runOnBasicBlock(BasicBlock &BB) {
  const DataLayout &DL = BB.getModule()->getDataLayout();
  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = &*BBI++;

    // Handle 'free' calls specially.
    if (CallInst *F = isFreeCall(Inst, TLI)) {
      MadeChange |= HandleFree(F);
      continue;
    }

    // If we find something that writes memory, get its memory dependence.
    if (!hasMemoryWrite(Inst, *TLI))
      continue;

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
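    // As an illustrative example (hypothetical IR), in:
    //   %v = load i32, i32* %p
    //   store i32 %v, i32* %p
    // the store writes back the value it just loaded and can be deleted,
    // provided nothing between the load and the store may modify %p.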
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {

      auto RemoveDeadInstAndUpdateBBI = [&](Instruction *DeadInst) {
        // DeleteDeadInstruction can delete the current instruction.  Save BBI
        // in case we need it.
        WeakVH NextInst(&*BBI);

        DeleteDeadInstruction(DeadInst, *MD, *TLI);

        if (!NextInst) // Next instruction deleted.
          BBI = BB.begin();
        else if (BBI != BB.begin()) // Revisit this instruction if possible.
          --BBI;
        ++NumRedundantStores;
        MadeChange = true;
      };

      if (LoadInst *DepLoad = dyn_cast<LoadInst>(SI->getValueOperand())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            isRemovable(SI) &&
            MemoryIsNotModifiedBetween(DepLoad, SI)) {

          DEBUG(dbgs() << "DSE: Remove Store Of Load from same pointer:\n  "
                       << "LOAD: " << *DepLoad << "\n  STORE: " << *SI << '\n');

          RemoveDeadInstAndUpdateBBI(SI);
          continue;
        }
      }

      // Remove null stores into the calloc'ed objects.
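      // As an illustrative example (hypothetical IR), a "store i8 0, i8* %m"
      // where %m is the result of a calloc-like call is redundant, because
      // calloc already returns zero-initialized memory, as long as nothing may
      // have written to %m in between.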
      Constant *StoredConstant = dyn_cast<Constant>(SI->getValueOperand());

      if (StoredConstant && StoredConstant->isNullValue() &&
          isRemovable(SI)) {
        Instruction *UnderlyingPointer = dyn_cast<Instruction>(
            GetUnderlyingObject(SI->getPointerOperand(), DL));

        if (UnderlyingPointer && isCallocLikeFn(UnderlyingPointer, TLI) &&
            MemoryIsNotModifiedBetween(UnderlyingPointer, SI)) {
          DEBUG(dbgs()
                << "DSE: Remove null store to the calloc'ed object:\n  DEAD: "
                << *Inst << "\n  OBJECT: " << *UnderlyingPointer << '\n');

          RemoveDeadInstAndUpdateBBI(SI);
          continue;
        }
      }
    }

    MemDepResult InstDep = MD->getDependency(Inst);

    // Ignore any store where we can't find a local dependence.
    // FIXME: cross-block DSE would be fun. :)
    if (!InstDep.isDef() && !InstDep.isClobber())
      continue;

    // Figure out what location is being stored to.
    MemoryLocation Loc = getLocForWrite(Inst, *AA);

    // If we didn't get a useful location, fail.
    if (!Loc.Ptr)
      continue;

    while (InstDep.isDef() || InstDep.isClobber()) {
      // Get the memory clobbered by the instruction we depend on.  MemDep will
      // skip any instructions that 'Loc' clearly doesn't interact with.  If we
      // end up depending on a may- or must-aliased load, then we can't optimize
      // away the store and we bail out.  However, if we depend on something
      // that overwrites the memory location we *can* potentially optimize it.
      //
      // Find out what memory location the dependent instruction stores.
      Instruction *DepWrite = InstDep.getInst();
      MemoryLocation DepLoc = getLocForWrite(DepWrite, *AA);
      // If we didn't get a useful location, bail out.
      if (!DepLoc.Ptr)
        break;

      // If we find a write that is a) removable (i.e., non-volatile), b) is
      // completely obliterated by the store to 'Loc', and c) which we know that
      // 'Inst' doesn't load from, then we can remove it.
      if (isRemovable(DepWrite) &&
          !isPossibleSelfRead(Inst, Loc, DepWrite, *TLI, *AA)) {
        int64_t InstWriteOffset, DepWriteOffset;
        OverwriteResult OR =
            isOverwrite(Loc, DepLoc, DL, *TLI, DepWriteOffset, InstWriteOffset);
        if (OR == OverwriteComplete) {
          DEBUG(dbgs() << "DSE: Remove Dead Store:\n  DEAD: "
                       << *DepWrite << "\n  KILLER: " << *Inst << '\n');

          // Delete the store and now-dead instructions that feed it.
          DeleteDeadInstruction(DepWrite, *MD, *TLI);
          ++NumFastStores;
          MadeChange = true;

          // DeleteDeadInstruction can delete the current instruction in loop
          // cases, reset BBI.
          BBI = Inst->getIterator();
          if (BBI != BB.begin())
            --BBI;
          break;
        } else if (OR == OverwriteEnd && isShortenable(DepWrite)) {
          // TODO: Base this on the target vector size so that if the earlier
          // store was too small to get vector writes anyway then it's likely a
          // good idea to shorten it.
          // Power-of-2 vector writes are probably always a bad idea to optimize
          // as any store/memset/memcpy is likely using vector instructions, so
          // shortening it to a non-vector size is likely to be slower.
          MemIntrinsic* DepIntrinsic = cast<MemIntrinsic>(DepWrite);
          unsigned DepWriteAlign = DepIntrinsic->getAlignment();
          if (llvm::isPowerOf2_64(InstWriteOffset) ||
              ((DepWriteAlign != 0) && InstWriteOffset % DepWriteAlign == 0)) {

            DEBUG(dbgs() << "DSE: Remove Dead Store:\n  OW END: "
                         << *DepWrite << "\n  KILLER (offset "
                         << InstWriteOffset << ", "
                         << DepLoc.Size << ")"
                         << *Inst << '\n');

            Value* DepWriteLength = DepIntrinsic->getLength();
            Value* TrimmedLength = ConstantInt::get(DepWriteLength->getType(),
                                                    InstWriteOffset -
                                                    DepWriteOffset);
            DepIntrinsic->setLength(TrimmedLength);
            MadeChange = true;
          }
        }
      }

      // If this is a may-aliased store that is clobbering the store value, we
      // can keep searching past it for another must-aliased pointer that stores
      // to the same location.  For example, in:
      //   store -> P
      //   store -> Q
      //   store -> P
      // we can remove the first store to P even though we don't know if P and Q
      // alias.
      if (DepWrite == &BB.front()) break;

      // Can't look past this instruction if it might read 'Loc'.
      if (AA->getModRefInfo(DepWrite, Loc) & MRI_Ref)
        break;

      InstDep = MD->getPointerDependencyFrom(Loc, false,
                                             DepWrite->getIterator(), &BB);
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}

/// Returns true if the memory which is accessed by the second instruction is
/// not modified between the first and the second instruction.
/// Precondition: Second instruction must be dominated by the first
/// instruction.
bool DSE::MemoryIsNotModifiedBetween(Instruction *FirstI,
                                     Instruction *SecondI) {
  SmallVector<BasicBlock *, 16> WorkList;
  SmallPtrSet<BasicBlock *, 8> Visited;
  BasicBlock::iterator FirstBBI(FirstI);
  ++FirstBBI;
  BasicBlock::iterator SecondBBI(SecondI);
  BasicBlock *FirstBB = FirstI->getParent();
  BasicBlock *SecondBB = SecondI->getParent();
  MemoryLocation MemLoc = MemoryLocation::get(SecondI);

  // Start checking the block of the second instruction.
  WorkList.push_back(SecondBB);
  bool isFirstBlock = true;

  // Check all blocks going backward until we reach the block of FirstI.
  while (!WorkList.empty()) {
    BasicBlock *B = WorkList.pop_back_val();

    // Ignore instructions before FirstI if this is FirstBB.
    BasicBlock::iterator BI = (B == FirstBB ? FirstBBI : B->begin());

    BasicBlock::iterator EI;
    if (isFirstBlock) {
      // Ignore instructions after SecondI if this is the first visit of
      // SecondBB.
      assert(B == SecondBB && "first block is not the block of SecondI");
      EI = SecondBBI;
      isFirstBlock = false;
    } else {
      // It's not SecondBB or (in case of a loop) the second visit of SecondBB.
      // In this case we also have to look at instructions after SecondI.
      EI = B->end();
    }
    for (; BI != EI; ++BI) {
      Instruction *I = &*BI;
      if (I->mayWriteToMemory() && I != SecondI) {
        auto Res = AA->getModRefInfo(I, MemLoc);
        if (Res != MRI_NoModRef)
          return false;
      }
    }
    if (B != FirstBB) {
      assert(B != &FirstBB->getParent()->getEntryBlock() &&
             "Should not hit the entry block because SecondI must be dominated "
             "by FirstI");
      for (auto PredI = pred_begin(B), PE = pred_end(B); PredI != PE; ++PredI) {
        if (!Visited.insert(*PredI).second)
          continue;
        WorkList.push_back(*PredI);
      }
    }
  }
  return true;
}


/// Find all blocks that will unconditionally lead to the block BB and append
/// them to the block worklist Blocks.
static void FindUnconditionalPreds(SmallVectorImpl<BasicBlock *> &Blocks,
                                   BasicBlock *BB, DominatorTree *DT) {
  for (pred_iterator I = pred_begin(BB), E = pred_end(BB); I != E; ++I) {
    BasicBlock *Pred = *I;
    if (Pred == BB) continue;
    TerminatorInst *PredTI = Pred->getTerminator();
    if (PredTI->getNumSuccessors() != 1)
      continue;

    if (DT->isReachableFromEntry(Pred))
      Blocks.push_back(Pred);
  }
}

/// HandleFree - Handle frees of entire structures whose dependency is a store
/// to a field of that structure.
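/// As an illustrative example (hypothetical IR), in:
///   store i32 0, i32* %field      ; %field points into the object %s
///   call void @free(i8* %s)
/// the store is dead because the whole object is freed immediately afterwards.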
bool DSE::HandleFree(CallInst *F) {
  bool MadeChange = false;

  MemoryLocation Loc = MemoryLocation(F->getOperand(0));
  SmallVector<BasicBlock *, 16> Blocks;
  Blocks.push_back(F->getParent());
  const DataLayout &DL = F->getModule()->getDataLayout();

  while (!Blocks.empty()) {
    BasicBlock *BB = Blocks.pop_back_val();
    Instruction *InstPt = BB->getTerminator();
    if (BB == F->getParent()) InstPt = F;

    MemDepResult Dep =
        MD->getPointerDependencyFrom(Loc, false, InstPt->getIterator(), BB);
    while (Dep.isDef() || Dep.isClobber()) {
      Instruction *Dependency = Dep.getInst();
      if (!hasMemoryWrite(Dependency, *TLI) || !isRemovable(Dependency))
        break;

      Value *DepPointer =
          GetUnderlyingObject(getStoredPointerOperand(Dependency), DL);

      // Check for aliasing.
      if (!AA->isMustAlias(F->getArgOperand(0), DepPointer))
        break;

      auto Next = ++Dependency->getIterator();

      // DCE instructions only used to calculate that store.
      DeleteDeadInstruction(Dependency, *MD, *TLI);
      ++NumFastStores;
      MadeChange = true;

      // Inst's old Dependency is now deleted.  Compute the next dependency,
      // which may also be dead, as in
      //    s[0] = 0;
      //    s[1] = 0; // This has just been deleted.
      //    free(s);
      Dep = MD->getPointerDependencyFrom(Loc, false, Next, BB);
    }

    if (Dep.isNonLocal())
      FindUnconditionalPreds(Blocks, BB, DT);
  }

  return MadeChange;
}

/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
/// %A = alloca i32
/// ...
/// store i32 1, i32* %A
/// ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  bool MadeChange = false;

  // Keep track of all of the stack objects that are dead at the end of the
  // function.
  SmallSetVector<Value*, 16> DeadStackObjects;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock &Entry = BB.getParent()->front();
  for (Instruction &I : Entry) {
    if (isa<AllocaInst>(&I))
      DeadStackObjects.insert(&I);

    // Okay, so these are dead heap objects, but if the pointer never escapes
    // then it's leaked by this function anyways.
    else if (isAllocLikeFn(&I, TLI) && !PointerMayBeCaptured(&I, true, true))
      DeadStackObjects.insert(&I);
  }

  // Treat byval or inalloca arguments the same, stores to them are dead at the
  // end of the function.
  for (Argument &AI : BB.getParent()->args())
    if (AI.hasByValOrInAllocaAttr())
      DeadStackObjects.insert(&AI);

  const DataLayout &DL = BB.getModule()->getDataLayout();

  // Scan the basic block backwards.
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ) {
    --BBI;

    // If we find a store, check to see if it points into a dead stack value.
    if (hasMemoryWrite(&*BBI, *TLI) && isRemovable(&*BBI)) {
      // See through pointer-to-pointer bitcasts.
      SmallVector<Value *, 4> Pointers;
      GetUnderlyingObjects(getStoredPointerOperand(&*BBI), Pointers, DL);

      // Stores to stack values are valid candidates for removal.
      bool AllDead = true;
      for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
           E = Pointers.end(); I != E; ++I)
        if (!DeadStackObjects.count(*I)) {
          AllDead = false;
          break;
        }

      if (AllDead) {
        Instruction *Dead = &*BBI++;

        DEBUG(dbgs() << "DSE: Dead Store at End of Block:\n  DEAD: "
                     << *Dead << "\n  Objects: ";
              for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
                   E = Pointers.end(); I != E; ++I) {
                dbgs() << **I;
                if (std::next(I) != E)
                  dbgs() << ", ";
              }
              dbgs() << '\n');

        // DCE instructions only used to calculate that store.
        DeleteDeadInstruction(Dead, *MD, *TLI, &DeadStackObjects);
        ++NumFastStores;
        MadeChange = true;
        continue;
      }
    }

    // Remove any dead non-memory-mutating instructions.
    if (isInstructionTriviallyDead(&*BBI, TLI)) {
      Instruction *Inst = &*BBI++;
      DeleteDeadInstruction(Inst, *MD, *TLI, &DeadStackObjects);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (isa<AllocaInst>(BBI)) {
      // Remove allocas from the list of dead stack objects; there can't be
      // any references before the definition.
      DeadStackObjects.remove(&*BBI);
      continue;
    }

    if (auto CS = CallSite(&*BBI)) {
      // Remove allocation function calls from the list of dead stack objects;
      // there can't be any references before the definition.
      if (isAllocLikeFn(&*BBI, TLI))
        DeadStackObjects.remove(&*BBI);

      // If this call does not access memory, it can't be loading any of our
      // pointers.
      if (AA->doesNotAccessMemory(CS))
        continue;

      // If the call might load from any of our allocas, then any store above
      // the call is live.
      DeadStackObjects.remove_if([&](Value *I) {
        // See if the call site touches the value.
        ModRefInfo A = AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI));

        return A == MRI_ModRef || A == MRI_Ref;
      });

      // If all of the allocas were clobbered by the call then we're not going
      // to find anything else to process.
      if (DeadStackObjects.empty())
        break;

      continue;
    }

    MemoryLocation LoadedLoc;

    // If we encounter a use of the pointer, it is no longer considered dead.
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      if (!L->isUnordered()) // Be conservative with atomic/volatile load.
        break;
      LoadedLoc = MemoryLocation::get(L);
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      LoadedLoc = MemoryLocation::get(V);
    } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {
      LoadedLoc = MemoryLocation::getForSource(MTI);
    } else if (!BBI->mayReadFromMemory()) {
      // Instruction doesn't read memory.  Note that stores that weren't removed
      // above will hit this case.
      continue;
    } else {
      // Unknown inst; assume it clobbers everything.
      break;
    }

    // Remove any allocas from the DeadStackObjects set that are loaded, as this
    // makes any stores above the access live.
    RemoveAccessedObjects(LoadedLoc, DeadStackObjects, DL);

    // If all of the allocas were clobbered by the access then we're not going
    // to find anything else to process.
    if (DeadStackObjects.empty())
      break;
  }

  return MadeChange;
}


/// RemoveAccessedObjects - Check to see if the specified location may alias any
/// of the stack objects in the DeadStackObjects set.  If so, they become live
/// because the location is being loaded.
void DSE::RemoveAccessedObjects(const MemoryLocation &LoadedLoc,
                                SmallSetVector<Value *, 16> &DeadStackObjects,
                                const DataLayout &DL) {
  const Value *UnderlyingPointer = GetUnderlyingObject(LoadedLoc.Ptr, DL);

  // A constant can't be in the dead pointer set.
  if (isa<Constant>(UnderlyingPointer))
    return;

  // If the kill pointer can be easily reduced to an alloca, don't bother doing
  // extraneous AA queries.
  if (isa<AllocaInst>(UnderlyingPointer) || isa<Argument>(UnderlyingPointer)) {
    DeadStackObjects.remove(const_cast<Value*>(UnderlyingPointer));
    return;
  }

  // Remove objects that could alias LoadedLoc.
  DeadStackObjects.remove_if([&](Value *I) {
    // See if the loaded location could alias the stack location.
    MemoryLocation StackLoc(I, getPointerSize(I, DL, *TLI));
    return !AA->isNoAlias(StackLoc, LoadedLoc);
  });
}