//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
using namespace llvm;

STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");

namespace {
  struct DSE : public FunctionPass {
    AliasAnalysis *AA;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID), AA(0), MD(0), DT(0) {
      initializeDSEPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnFunction(Function &F) {
      AA = &getAnalysis<AliasAnalysis>();
      MD = &getAnalysis<MemoryDependenceAnalysis>();
      DT = &getAnalysis<DominatorTree>();

      bool Changed = false;
      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT->isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);

      AA = 0; MD = 0; DT = 0;
      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool HandleFree(CallInst *F);
    bool handleEndBlock(BasicBlock &BB);
    void RemoveAccessedObjects(const AliasAnalysis::Location &LoadedLoc,
                               SmallSetVector<Value*, 16> &DeadStackObjects);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<AliasAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }
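
// Typical driver usage of the factory above, as a sketch only (the
// FunctionPassManager calls below are assumptions about the host tool, not
// part of this file):
//   FunctionPassManager FPM(&M);                 // M is the Module to optimize
//   FPM.add(createDeadStoreEliminationPass());
//   FPM.doInitialization();
//   FPM.run(F);                                  // F is a Function in M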

//===----------------------------------------------------------------------===//
// Helper functions
//===----------------------------------------------------------------------===//

/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
///
static void DeleteDeadInstruction(Instruction *I,
                                  MemoryDependenceAnalysis &MD,
                                  const TargetLibraryInfo *TLI,
                                  SmallSetVector<Value*, 16> *ValueSet = 0) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();
    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MD.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI, TLI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->remove(DeadInst);
  } while (!NowDeadInsts.empty());
}


/// hasMemoryWrite - Does this instruction write some memory?  This only returns
/// true for things that we can analyze with other helpers below.
static bool hasMemoryWrite(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}

/// getLocForWrite - Return a Location stored to by the specified instruction.
/// If isRemovable returns true, this function and getLocForRead completely
/// describe the memory operations for this instruction.
static AliasAnalysis::Location
getLocForWrite(Instruction *Inst, AliasAnalysis &AA) {
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
    return AA.getLocation(SI);

  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(Inst)) {
    // memcpy/memmove/memset.
    AliasAnalysis::Location Loc = AA.getLocationForDest(MI);
    // If we don't have target data around, an unknown size in Location means
    // that we should use the size of the pointee type.  This isn't valid for
    // memset/memcpy, which writes more than an i8.
    if (Loc.Size == AliasAnalysis::UnknownSize && AA.getTargetData() == 0)
      return AliasAnalysis::Location();
    return Loc;
  }

  IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst);
  if (II == 0) return AliasAnalysis::Location();

  switch (II->getIntrinsicID()) {
  default: return AliasAnalysis::Location(); // Unhandled intrinsic.
  case Intrinsic::init_trampoline:
    // If we don't have target data around, an unknown size in Location means
    // that we should use the size of the pointee type.  This isn't valid for
    // init.trampoline, which writes more than an i8.
    if (AA.getTargetData() == 0) return AliasAnalysis::Location();

    // FIXME: We don't know the size of the trampoline, so we can't really
    // handle it here.
    return AliasAnalysis::Location(II->getArgOperand(0));
  case Intrinsic::lifetime_end: {
    uint64_t Len = cast<ConstantInt>(II->getArgOperand(0))->getZExtValue();
    return AliasAnalysis::Location(II->getArgOperand(1), Len);
  }
  }
}

/// getLocForRead - Return the location read by the specified "hasMemoryWrite"
/// instruction if any.
static AliasAnalysis::Location
getLocForRead(Instruction *Inst, AliasAnalysis &AA) {
  assert(hasMemoryWrite(Inst) && "Unknown instruction case");

  // The only instructions that both read and write are the mem transfer
  // instructions (memcpy/memmove).
  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(Inst))
    return AA.getLocationForSource(MTI);
  return AliasAnalysis::Location();
}


/// isRemovable - If the value of this instruction and the memory it writes to
/// is unused, may we delete this instruction?
static bool isRemovable(Instruction *I) {
  // Don't remove volatile/atomic stores.
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isUnordered();

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: llvm_unreachable("doesn't pass 'hasMemoryWrite' predicate");
  case Intrinsic::lifetime_end:
    // Never remove dead lifetime_end's, e.g. because it is followed by a
    // free.
    return false;
  case Intrinsic::init_trampoline:
    // Always safe to remove init_trampoline.
    return true;

  case Intrinsic::memset:
  case Intrinsic::memmove:
  case Intrinsic::memcpy:
    // Don't remove volatile memory intrinsics.
    return !cast<MemIntrinsic>(II)->isVolatile();
  }
}


/// isShortenable - Returns true if this instruction can be safely shortened in
/// length.
static bool isShortenable(Instruction *I) {
  // Don't shorten stores for now
  if (isa<StoreInst>(I))
    return false;

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
    default: return false;
    case Intrinsic::memset:
    case Intrinsic::memcpy:
      // Do shorten memory intrinsics.
      return true;
  }
}

/// getStoredPointerOperand - Return the pointer that is being written to.
static Value *getStoredPointerOperand(Instruction *I) {
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getDest();

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: llvm_unreachable("Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return II->getArgOperand(0);
  }
}

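/// getPointerSize - Return the size in bytes of the object pointed to by V,
/// or AliasAnalysis::UnknownSize if it cannot be determined.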
static uint64_t getPointerSize(const Value *V, AliasAnalysis &AA) {
  uint64_t Size;
  if (getObjectSize(V, Size, AA.getTargetData(), AA.getTargetLibraryInfo()))
    return Size;
  return AliasAnalysis::UnknownSize;
}

namespace {
  enum OverwriteResult
  {
    OverwriteComplete,
    OverwriteEnd,
    OverwriteUnknown
  };
}

/// isOverwrite - Return 'OverwriteComplete' if a store to the 'Later' location
/// completely overwrites a store to the 'Earlier' location.
/// 'OverwriteEnd' if the end of the 'Earlier' location is completely
/// overwritten by 'Later', or 'OverwriteUnknown' if nothing can be determined
static OverwriteResult isOverwrite(const AliasAnalysis::Location &Later,
                                   const AliasAnalysis::Location &Earlier,
                                   AliasAnalysis &AA,
                                   int64_t &EarlierOff,
                                   int64_t &LaterOff) {
  const Value *P1 = Earlier.Ptr->stripPointerCasts();
  const Value *P2 = Later.Ptr->stripPointerCasts();

  // If the start pointers are the same, we just have to compare sizes to see if
  // the later store was larger than the earlier store.
  if (P1 == P2) {
    // If we don't know the sizes of either access, then we can't do a
    // comparison.
    if (Later.Size == AliasAnalysis::UnknownSize ||
        Earlier.Size == AliasAnalysis::UnknownSize) {
      // If we have no TargetData information around, then the size of the store
      // is inferrable from the pointee type.  If they are the same type, then
      // we know that the store is safe.
      if (AA.getTargetData() == 0 &&
          Later.Ptr->getType() == Earlier.Ptr->getType())
        return OverwriteComplete;

      return OverwriteUnknown;
    }

    // Make sure that the Later size is >= the Earlier size.
    if (Later.Size >= Earlier.Size)
      return OverwriteComplete;
  }

  // Otherwise, we have to have size information, and the later store has to be
  // larger than the earlier one.
  if (Later.Size == AliasAnalysis::UnknownSize ||
      Earlier.Size == AliasAnalysis::UnknownSize ||
      AA.getTargetData() == 0)
    return OverwriteUnknown;

  // Check to see if the later store is to the entire object (either a global,
  // an alloca, or a byval argument).  If so, then it clearly overwrites any
  // other store to the same object.
  const TargetData &TD = *AA.getTargetData();

  const Value *UO1 = GetUnderlyingObject(P1, &TD),
              *UO2 = GetUnderlyingObject(P2, &TD);

  // If we can't resolve the same pointers to the same object, then we can't
  // analyze them at all.
  if (UO1 != UO2)
    return OverwriteUnknown;

  // If the "Later" store is to a recognizable object, get its size.
  uint64_t ObjectSize = getPointerSize(UO2, AA);
  if (ObjectSize != AliasAnalysis::UnknownSize)
    if (ObjectSize == Later.Size && ObjectSize >= Earlier.Size)
      return OverwriteComplete;

  // Okay, we have stores to two completely different pointers.  Try to
  // decompose the pointer into a "base + constant_offset" form.  If the base
  // pointers are equal, then we can reason about the two stores.
  EarlierOff = 0;
  LaterOff = 0;
  const Value *BP1 = GetPointerBaseWithConstantOffset(P1, EarlierOff, TD);
  const Value *BP2 = GetPointerBaseWithConstantOffset(P2, LaterOff, TD);

  // If the base pointers still differ, we have two completely different stores.
  if (BP1 != BP2)
    return OverwriteUnknown;

  // The later store completely overlaps the earlier store if:
  //
  // 1. Both start at the same offset and the later one's size is greater than
  //    or equal to the earlier one's, or
  //
  //      |--earlier--|
  //      |--   later   --|
  //
  // 2. The earlier store has an offset greater than the later offset, but which
  //    still lies completely within the later store.
  //
  //        |--earlier--|
  //    |-----  later  ------|
  //
  // We have to be careful here as *Off is signed while *.Size is unsigned.
  if (EarlierOff >= LaterOff &&
      Later.Size >= Earlier.Size &&
      uint64_t(EarlierOff - LaterOff) + Earlier.Size <= Later.Size)
    return OverwriteComplete;

  // The other interesting case is if the later store overwrites the end of
  // the earlier store
  //
  //      |--earlier--|
  //                |--   later   --|
  //
  // In this case we may want to trim the size of earlier to avoid generating
  // writes to addresses which will definitely be overwritten later
  if (LaterOff > EarlierOff &&
      LaterOff < int64_t(EarlierOff + Earlier.Size) &&
      int64_t(LaterOff + Later.Size) >= int64_t(EarlierOff + Earlier.Size))
    return OverwriteEnd;

  // Otherwise, they don't completely overlap.
  return OverwriteUnknown;
}

/// isPossibleSelfRead - If 'Inst' might be a self read (i.e. a noop copy of a
/// memory region into an identical pointer) then it doesn't actually make its
/// input dead in the traditional sense.  Consider this case:
///
///   memcpy(A <- B)
///   memcpy(A <- A)
///
/// In this case, the second store to A does not make the first store to A dead.
/// The usual situation isn't an explicit A<-A store like this (which can be
/// trivially removed) but a case where two pointers may alias.
///
/// This function detects when it is unsafe to remove a dependent instruction
/// because the DSE inducing instruction may be a self-read.
static bool isPossibleSelfRead(Instruction *Inst,
                               const AliasAnalysis::Location &InstStoreLoc,
                               Instruction *DepWrite, AliasAnalysis &AA) {
  // Self reads can only happen for instructions that read memory.  Get the
  // location read.
  AliasAnalysis::Location InstReadLoc = getLocForRead(Inst, AA);
  if (InstReadLoc.Ptr == 0) return false;  // Not a reading instruction.

  // If the read and written loc obviously don't alias, it isn't a read.
  if (AA.isNoAlias(InstReadLoc, InstStoreLoc)) return false;

  // Okay, 'Inst' may copy over itself.  However, we can still remove the
  // DepWrite instruction if we can prove that it reads from the same location
  // as Inst.  This handles useful cases like:
  //    memcpy(A <- B)
  //    memcpy(A <- B)
  // Here we don't know if A/B may alias, but we do know that B/B are must
  // aliases, so removing the first memcpy is safe (assuming it writes <= #
  // bytes as the second one).
435 AliasAnalysis::Location DepReadLoc = getLocForRead(DepWrite, AA);
Owen Anderson58704ee2011-09-06 18:14:09 +0000436
Chris Lattner94fbdf32010-12-06 01:48:06 +0000437 if (DepReadLoc.Ptr && AA.isMustAlias(InstReadLoc.Ptr, DepReadLoc.Ptr))
438 return false;
Owen Anderson58704ee2011-09-06 18:14:09 +0000439
Chris Lattner94fbdf32010-12-06 01:48:06 +0000440 // If DepWrite doesn't read memory or if we can't prove it is a must alias,
441 // then it can't be considered dead.
442 return true;
443}
444
Chris Lattner67122512010-11-30 21:58:14 +0000445
446//===----------------------------------------------------------------------===//
447// DSE Pass
448//===----------------------------------------------------------------------===//
449
Owen Anderson10e52ed2007-08-01 06:36:51 +0000450bool DSE::runOnBasicBlock(BasicBlock &BB) {
Owen Anderson5e72db32007-07-11 00:46:18 +0000451 bool MadeChange = false;
Owen Anderson58704ee2011-09-06 18:14:09 +0000452
Chris Lattner49162672009-09-02 06:31:02 +0000453 // Do a top-down walk on the BB.
Chris Lattnerf2a8ba42008-11-28 21:29:52 +0000454 for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
455 Instruction *Inst = BBI++;
Owen Anderson58704ee2011-09-06 18:14:09 +0000456
Chris Lattner9d179d92010-11-30 01:28:33 +0000457 // Handle 'free' calls specially.
Benjamin Kramer8bcc9712012-08-29 15:32:21 +0000458 if (CallInst *F = isFreeCall(Inst, AA->getTargetLibraryInfo())) {
Chris Lattner9d179d92010-11-30 01:28:33 +0000459 MadeChange |= HandleFree(F);
460 continue;
461 }
Owen Anderson58704ee2011-09-06 18:14:09 +0000462
Chris Lattner2227a8a2010-11-30 01:37:52 +0000463 // If we find something that writes memory, get its memory dependence.
464 if (!hasMemoryWrite(Inst))
Owen Anderson0aecf0e2007-08-08 04:52:29 +0000465 continue;
Chris Lattnerd4f10902010-11-30 00:01:19 +0000466
Chris Lattner51c28a92010-11-30 19:34:42 +0000467 MemDepResult InstDep = MD->getDependency(Inst);
Owen Anderson58704ee2011-09-06 18:14:09 +0000468
Eli Friedman7d58bc72011-06-15 00:47:34 +0000469 // Ignore any store where we can't find a local dependence.
Chris Lattner57e91ea2008-12-06 00:53:22 +0000470 // FIXME: cross-block DSE would be fun. :)
Eli Friedmanc1702c82011-10-13 22:14:57 +0000471 if (!InstDep.isDef() && !InstDep.isClobber())
Chris Lattner58b779e2010-11-30 07:23:21 +0000472 continue;
Owen Anderson58704ee2011-09-06 18:14:09 +0000473
Chris Lattner57e91ea2008-12-06 00:53:22 +0000474 // If we're storing the same value back to a pointer that we just
475 // loaded from, then the store can be removed.
Nick Lewycky90271472009-11-10 06:46:40 +0000476 if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
477 if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
478 if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
Eli Friedman9a468152011-08-17 22:22:24 +0000479 SI->getOperand(0) == DepLoad && isRemovable(SI)) {
Chris Lattnerca335e32010-12-06 21:13:51 +0000480 DEBUG(dbgs() << "DSE: Remove Store Of Load from same pointer:\n "
481 << "LOAD: " << *DepLoad << "\n STORE: " << *SI << '\n');
Owen Anderson58704ee2011-09-06 18:14:09 +0000482
Nick Lewycky90271472009-11-10 06:46:40 +0000483 // DeleteDeadInstruction can delete the current instruction. Save BBI
484 // in case we need it.
485 WeakVH NextInst(BBI);
Owen Anderson58704ee2011-09-06 18:14:09 +0000486
Benjamin Kramer8bcc9712012-08-29 15:32:21 +0000487 DeleteDeadInstruction(SI, *MD, AA->getTargetLibraryInfo());
Owen Anderson58704ee2011-09-06 18:14:09 +0000488
Nick Lewycky90271472009-11-10 06:46:40 +0000489 if (NextInst == 0) // Next instruction deleted.
490 BBI = BB.begin();
491 else if (BBI != BB.begin()) // Revisit this instruction if possible.
492 --BBI;
Dan Gohmand2d1ae12010-06-22 15:08:57 +0000493 ++NumFastStores;
Nick Lewycky90271472009-11-10 06:46:40 +0000494 MadeChange = true;
495 continue;
496 }
Chris Lattner0e3d6332008-12-05 21:04:20 +0000497 }
Owen Anderson5e72db32007-07-11 00:46:18 +0000498 }
Owen Anderson58704ee2011-09-06 18:14:09 +0000499
Chris Lattner58b779e2010-11-30 07:23:21 +0000500 // Figure out what location is being stored to.
Chris Lattner51c28a92010-11-30 19:34:42 +0000501 AliasAnalysis::Location Loc = getLocForWrite(Inst, *AA);
Chris Lattner58b779e2010-11-30 07:23:21 +0000502
503 // If we didn't get a useful location, fail.
504 if (Loc.Ptr == 0)
505 continue;
Owen Anderson58704ee2011-09-06 18:14:09 +0000506
Eli Friedmanc1702c82011-10-13 22:14:57 +0000507 while (InstDep.isDef() || InstDep.isClobber()) {
Chris Lattner58b779e2010-11-30 07:23:21 +0000508 // Get the memory clobbered by the instruction we depend on. MemDep will
509 // skip any instructions that 'Loc' clearly doesn't interact with. If we
510 // end up depending on a may- or must-aliased load, then we can't optimize
      // away the store and we bail out.  However, if we depend on something
      // that overwrites the memory location we *can* potentially optimize it.
      //
      // Find out what memory location the dependent instruction stores.
      Instruction *DepWrite = InstDep.getInst();
      AliasAnalysis::Location DepLoc = getLocForWrite(DepWrite, *AA);
      // If we didn't get a useful location, or if it isn't a size, bail out.
      if (DepLoc.Ptr == 0)
        break;

      // If we find a write that is a) removable (i.e., non-volatile), b) is
      // completely obliterated by the store to 'Loc', and c) which we know that
      // 'Inst' doesn't load from, then we can remove it.
      if (isRemovable(DepWrite) &&
          !isPossibleSelfRead(Inst, Loc, DepWrite, *AA)) {
        int64_t InstWriteOffset, DepWriteOffset;
        OverwriteResult OR = isOverwrite(Loc, DepLoc, *AA,
                                         DepWriteOffset, InstWriteOffset);
        if (OR == OverwriteComplete) {
          DEBUG(dbgs() << "DSE: Remove Dead Store:\n  DEAD: "
                << *DepWrite << "\n  KILLER: " << *Inst << '\n');

          // Delete the store and now-dead instructions that feed it.
          DeleteDeadInstruction(DepWrite, *MD, AA->getTargetLibraryInfo());
          ++NumFastStores;
          MadeChange = true;

          // DeleteDeadInstruction can delete the current instruction in loop
          // cases, reset BBI.
          BBI = Inst;
          if (BBI != BB.begin())
            --BBI;
          break;
        } else if (OR == OverwriteEnd && isShortenable(DepWrite)) {
          // TODO: base this on the target vector size so that if the earlier
          // store was too small to get vector writes anyway, then it's likely
          // a good idea to shorten it.
          // Power-of-2 vector writes are probably always a bad idea to
          // optimize, as any store/memset/memcpy is likely using vector
          // instructions, so shortening it to a non-vector size is likely to
          // be slower.
551 MemIntrinsic* DepIntrinsic = cast<MemIntrinsic>(DepWrite);
552 unsigned DepWriteAlign = DepIntrinsic->getAlignment();
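          // Only shorten when the cut point (the later store's start offset,
          // which becomes the new end of the earlier write) is a power of two
          // or a multiple of the earlier intrinsic's alignment.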
          if (llvm::isPowerOf2_64(InstWriteOffset) ||
              ((DepWriteAlign != 0) && InstWriteOffset % DepWriteAlign == 0)) {

            DEBUG(dbgs() << "DSE: Remove Dead Store:\n  OW END: "
                  << *DepWrite << "\n  KILLER (offset "
                  << InstWriteOffset << ", "
                  << DepLoc.Size << ")"
                  << *Inst << '\n');

            Value* DepWriteLength = DepIntrinsic->getLength();
            Value* TrimmedLength = ConstantInt::get(DepWriteLength->getType(),
                                                    InstWriteOffset -
                                                    DepWriteOffset);
            DepIntrinsic->setLength(TrimmedLength);
            MadeChange = true;
          }
        }
      }

      // If this is a may-aliased store that is clobbering the store value, we
      // can keep searching past it for another must-aliased pointer that stores
      // to the same location.  For example, in:
      //   store -> P
      //   store -> Q
      //   store -> P
      // we can remove the first store to P even though we don't know if P and Q
      // alias.
      if (DepWrite == &BB.front()) break;

      // Can't look past this instruction if it might read 'Loc'.
      if (AA->getModRefInfo(DepWrite, Loc) & AliasAnalysis::Ref)
        break;

      InstDep = MD->getPointerDependencyFrom(Loc, false, DepWrite, &BB);
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}

/// Find all blocks that will unconditionally lead to the block BB and append
/// them to Blocks.
static void FindUnconditionalPreds(SmallVectorImpl<BasicBlock *> &Blocks,
                                   BasicBlock *BB, DominatorTree *DT) {
  for (pred_iterator I = pred_begin(BB), E = pred_end(BB); I != E; ++I) {
    BasicBlock *Pred = *I;
    if (Pred == BB) continue;
    TerminatorInst *PredTI = Pred->getTerminator();
    if (PredTI->getNumSuccessors() != 1)
      continue;

    if (DT->isReachableFromEntry(Pred))
      Blocks.push_back(Pred);
  }
}

/// HandleFree - Handle frees of entire structures whose dependency is a store
/// to a field of that structure.
bool DSE::HandleFree(CallInst *F) {
  bool MadeChange = false;

  AliasAnalysis::Location Loc = AliasAnalysis::Location(F->getOperand(0));
  SmallVector<BasicBlock *, 16> Blocks;
  Blocks.push_back(F->getParent());

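  // Walk this block and every block that unconditionally leads to it, scanning
  // backwards from the free (or from each block's terminator) for stores into
  // the freed object; such stores are dead because the object is freed anyway.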
  while (!Blocks.empty()) {
    BasicBlock *BB = Blocks.pop_back_val();
    Instruction *InstPt = BB->getTerminator();
    if (BB == F->getParent()) InstPt = F;

    MemDepResult Dep = MD->getPointerDependencyFrom(Loc, false, InstPt, BB);
    while (Dep.isDef() || Dep.isClobber()) {
      Instruction *Dependency = Dep.getInst();
      if (!hasMemoryWrite(Dependency) || !isRemovable(Dependency))
        break;

      Value *DepPointer =
        GetUnderlyingObject(getStoredPointerOperand(Dependency));

      // Check for aliasing.
      if (!AA->isMustAlias(F->getArgOperand(0), DepPointer))
        break;

      Instruction *Next = llvm::next(BasicBlock::iterator(Dependency));

      // DCE instructions only used to calculate that store
      DeleteDeadInstruction(Dependency, *MD, AA->getTargetLibraryInfo());
      ++NumFastStores;
      MadeChange = true;

      // Inst's old Dependency is now deleted.  Compute the next dependency,
      // which may also be dead, as in
      //    s[0] = 0;
      //    s[1] = 0; // This has just been deleted.
      //    free(s);
      Dep = MD->getPointerDependencyFrom(Loc, false, Next, BB);
    }

    if (Dep.isNonLocal())
      FindUnconditionalPreds(Blocks, BB, DT);
  }

  return MadeChange;
}

/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
///   %A = alloca i32
///   ...
///   store i32 1, i32* %A
///   ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  bool MadeChange = false;

  // Keep track of all of the stack objects that are dead at the end of the
  // function.
  SmallSetVector<Value*, 16> DeadStackObjects;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I) {
    if (isa<AllocaInst>(I))
      DeadStackObjects.insert(I);

    // Okay, so these are dead heap objects, but if the pointer never escapes
    // then it's leaked by this function anyways.
    else if (isAllocLikeFn(I, AA->getTargetLibraryInfo()) &&
             !PointerMayBeCaptured(I, true, true))
      DeadStackObjects.insert(I);
  }

  // Treat byval arguments the same, stores to them are dead at the end of the
  // function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      DeadStackObjects.insert(AI);

  // Scan the basic block backwards
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store, check to see if it points into a dead stack value.
    if (hasMemoryWrite(BBI) && isRemovable(BBI)) {
      // See through pointer-to-pointer bitcasts
      SmallVector<Value *, 4> Pointers;
      GetUnderlyingObjects(getStoredPointerOperand(BBI), Pointers);

      // Stores to stack values are valid candidates for removal.
      bool AllDead = true;
      for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
           E = Pointers.end(); I != E; ++I)
        if (!DeadStackObjects.count(*I)) {
          AllDead = false;
          break;
        }

      if (AllDead) {
        Instruction *Dead = BBI++;

        DEBUG(dbgs() << "DSE: Dead Store at End of Block:\n  DEAD: "
                     << *Dead << "\n  Objects: ";
              for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
                   E = Pointers.end(); I != E; ++I) {
                dbgs() << **I;
                if (llvm::next(I) != E)
                  dbgs() << ", ";
              }
              dbgs() << '\n');

        // DCE instructions only used to calculate that store.
        DeleteDeadInstruction(Dead, *MD, AA->getTargetLibraryInfo(),
                              &DeadStackObjects);
        ++NumFastStores;
        MadeChange = true;
        continue;
      }
    }

    // Remove any dead non-memory-mutating instructions.
    if (isInstructionTriviallyDead(BBI, AA->getTargetLibraryInfo())) {
      Instruction *Inst = BBI++;
      DeleteDeadInstruction(Inst, *MD, AA->getTargetLibraryInfo(),
                            &DeadStackObjects);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (isa<AllocaInst>(BBI)) {
      // Remove allocas from the list of dead stack objects; there can't be
      // any references before the definition.
      DeadStackObjects.remove(BBI);
      continue;
    }

    if (CallSite CS = cast<Value>(BBI)) {
      // Remove allocation function calls from the list of dead stack objects;
      // there can't be any references before the definition.
      if (isAllocLikeFn(BBI, AA->getTargetLibraryInfo()))
        DeadStackObjects.remove(BBI);

      // If this call does not access memory, it can't be loading any of our
      // pointers.
      if (AA->doesNotAccessMemory(CS))
        continue;

      // If the call might load from any of our allocas, then any store above
      // the call is live.
      SmallVector<Value*, 8> LiveAllocas;
      for (SmallSetVector<Value*, 16>::iterator I = DeadStackObjects.begin(),
           E = DeadStackObjects.end(); I != E; ++I) {
        // See if the call site touches it.
        AliasAnalysis::ModRefResult A =
          AA->getModRefInfo(CS, *I, getPointerSize(*I, *AA));

        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          LiveAllocas.push_back(*I);
      }

      // If all of the allocas were clobbered by the call then we're not going
      // to find anything else to process.
      if (DeadStackObjects.size() == LiveAllocas.size())
        break;

      for (SmallVector<Value*, 8>::iterator I = LiveAllocas.begin(),
           E = LiveAllocas.end(); I != E; ++I)
        DeadStackObjects.remove(*I);

      continue;
    }

    AliasAnalysis::Location LoadedLoc;

    // If we encounter a use of the pointer, it is no longer considered dead
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      if (!L->isUnordered()) // Be conservative with atomic/volatile load
        break;
      LoadedLoc = AA->getLocation(L);
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      LoadedLoc = AA->getLocation(V);
    } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {
      LoadedLoc = AA->getLocationForSource(MTI);
    } else if (!BBI->mayReadFromMemory()) {
      // Instruction doesn't read memory.  Note that stores that weren't removed
      // above will hit this case.
      continue;
    } else {
      // Unknown inst; assume it clobbers everything.
      break;
    }

    // Remove any objects from the DeadStackObjects set that are loaded, as this
    // makes any stores above the access live.
    RemoveAccessedObjects(LoadedLoc, DeadStackObjects);

    // If all of the allocas were clobbered by the access then we're not going
    // to find anything else to process.
    if (DeadStackObjects.empty())
      break;
  }

  return MadeChange;
}

/// RemoveAccessedObjects - Check to see if the specified location may alias any
/// of the stack objects in the DeadStackObjects set.  If so, they become live
/// because the location is being loaded.
void DSE::RemoveAccessedObjects(const AliasAnalysis::Location &LoadedLoc,
                                SmallSetVector<Value*, 16> &DeadStackObjects) {
  const Value *UnderlyingPointer = GetUnderlyingObject(LoadedLoc.Ptr);

  // A constant can't be in the dead pointer set.
  if (isa<Constant>(UnderlyingPointer))
    return;

  // If the kill pointer can be easily reduced to an alloca, don't bother doing
  // extraneous AA queries.
  if (isa<AllocaInst>(UnderlyingPointer) || isa<Argument>(UnderlyingPointer)) {
    DeadStackObjects.remove(const_cast<Value*>(UnderlyingPointer));
    return;
  }

  SmallVector<Value*, 16> NowLive;
  for (SmallSetVector<Value*, 16>::iterator I = DeadStackObjects.begin(),
       E = DeadStackObjects.end(); I != E; ++I) {
    // See if the loaded location could alias the stack location.
    AliasAnalysis::Location StackLoc(*I, getPointerSize(*I, *AA));
    if (!AA->isNoAlias(StackLoc, LoadedLoc))
      NowLive.push_back(*I);
  }

  for (SmallVector<Value*, 16>::iterator I = NowLive.begin(), E = NowLive.end();
       I != E; ++I)
    DeadStackObjects.remove(*I);
Owen Anderson32c4a052007-07-12 21:41:30 +0000853}